diff --git a/01-ai/Yi-6B-Chat/result_2023-12-27 01:03:40.json b/01-ai/Yi-6B-Chat/result_2023-12-27 01:03:40.json new file mode 100644 index 0000000000000000000000000000000000000000..7d0f07e750675add229e2a05904eaf302b31acbd --- /dev/null +++ b/01-ai/Yi-6B-Chat/result_2023-12-27 01:03:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2235494880546075, + "acc_stderr": 0.012174896631202614, + "acc_norm": 0.26621160409556316, + "acc_norm_stderr": 0.012915774781523216 + }, + "harness|ko_hellaswag|10": { + "acc": 0.30611431985660226, + "acc_stderr": 0.004599358920909541, + "acc_norm": 0.35222067317267475, + "acc_norm_stderr": 0.004766860907171539 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3537675606641124, + "acc_stderr": 0.017098184708161906, + "acc_norm": 0.3537675606641124, + "acc_norm_stderr": 0.017098184708161906 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231004, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972718, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972718 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484504, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484504 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.035212249088415824, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.035212249088415824 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 
0.024396672985094764, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094764 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38064516129032255, + "acc_stderr": 0.027621717832907046, + "acc_norm": 0.38064516129032255, + "acc_norm_stderr": 0.027621717832907046 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641086, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641086 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.0472457740573157, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.0472457740573157 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881563, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.02813325257881563 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696545, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696545 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4577114427860697, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.4577114427860697, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.02516798233389414, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.02516798233389414 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361813, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361813 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470022, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470022 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.027002521034516475, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.027002521034516475 + }, 
+ "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.035729543331448094, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.035729543331448094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3779816513761468, + "acc_stderr": 0.02078918706672812, + "acc_norm": 0.3779816513761468, + "acc_norm_stderr": 0.02078918706672812 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.48412698412698413, + "acc_stderr": 0.04469881854072606, + "acc_norm": 0.48412698412698413, + "acc_norm_stderr": 0.04469881854072606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724556, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724556 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331161, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331161 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146291, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146291 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.03105239193758435 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3200782268578879, + "acc_stderr": 0.011914791947638519, + "acc_norm": 0.3200782268578879, + "acc_norm_stderr": 0.011914791947638519 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247271, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247271 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512567, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512567 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321323013, + "mc2": 0.48699251655132686, + "mc2_stderr": 0.016174272005682996 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33530106257378983, + "acc_stderr": 0.016230981232989827, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.016637917789798732 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "01-ai/Yi-6B-Chat", + "model_sha": "36326f9bc1c8020e0cf29ea830ee5e6679a66a23", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/01-ai/Yi-6B/result_2023-12-27 01:03:17.json b/01-ai/Yi-6B/result_2023-12-27 01:03:17.json new file mode 100644 index 0000000000000000000000000000000000000000..4f459f8f998f2f48ffb1f00fcbd2126cbec91194 --- /dev/null +++ b/01-ai/Yi-6B/result_2023-12-27 01:03:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2175767918088737, + "acc_stderr": 0.012057262020972502, + "acc_norm": 0.26109215017064846, + "acc_norm_stderr": 0.012835523909473855 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3021310495917148, + "acc_stderr": 0.00458243310963648, + "acc_norm": 0.35012945628360886, + "acc_norm_stderr": 0.004760354191370852 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39080459770114945, + "acc_stderr": 0.01744836606706253, + "acc_norm": 0.39080459770114945, + "acc_norm_stderr": 0.01744836606706253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.035212249088415824, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.035212249088415824 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.29411764705882354, + "acc_stderr": 0.04533838195929774, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929774 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34102564102564104, + "acc_stderr": 0.02403548967633507, + "acc_norm": 0.34102564102564104, + "acc_norm_stderr": 0.02403548967633507 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.027430866579973474, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.027430866579973474 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3660377358490566, + "acc_stderr": 0.029647813539365263, + "acc_norm": 0.3660377358490566, + "acc_norm_stderr": 0.029647813539365263 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562424, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 
0.026890297881303128, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.027272582849839792, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.027272582849839792 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.03499807276193338, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.03499807276193338 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3669724770642202, + "acc_stderr": 0.020664675659520536, + "acc_norm": 0.3669724770642202, + "acc_norm_stderr": 0.020664675659520536 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093092, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093092 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.02909767559946393, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.02909767559946393 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225612, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225612 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 
0.02909720956841196, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.030964810588786713, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.030964810588786713 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897634, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897634 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713616, + "mc2": 0.47599173122840593, + "mc2_stderr": 0.015773874222919516 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3305785123966942, + "acc_stderr": 0.0161734232988457, + "acc_norm": 0.40968122786304606, + "acc_norm_stderr": 0.01690756819221947 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "01-ai/Yi-6B", + "model_sha": "b881162e08d0fa65011cb53f2c51544e1b623112", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama-2-ko-en-instruct-v1/result_2023-10-12 11:14:34.json b/42MARU/GenAI-llama-2-ko-en-instruct-v1/result_2023-10-12 11:14:34.json new file mode 100644 index 0000000000000000000000000000000000000000..019020d68e431f521fe326c7daf85eb7ef0be498 --- /dev/null +++ b/42MARU/GenAI-llama-2-ko-en-instruct-v1/result_2023-10-12 11:14:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938165, + "acc_norm": 0.4445392491467577, + "acc_norm_stderr": 0.014521226405627077 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41545508862776337, + "acc_stderr": 0.004917931778593191, + "acc_norm": 0.5571599283011353, + "acc_norm_stderr": 0.004957068377516512 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.017875748840242407, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.017875748840242407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361033, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361033 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.02836504154256457, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.02836504154256457 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416828, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416828 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + 
"acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36134453781512604, + "acc_stderr": 0.031204691225150013, + "acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.031204691225150013 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.02428314052946728, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.02428314052946728 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280459, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280459 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651047, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112126, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112126 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4091743119266055, + "acc_stderr": 0.02108067026443373, + "acc_norm": 0.4091743119266055, + "acc_norm_stderr": 0.02108067026443373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389177, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389177 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483184, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483184 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0190709855896875, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0190709855896875 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320207, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320207 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347019, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347019 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000534, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 
0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.02533684856333237, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.02533684856333237 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.41350210970464135, + "acc_stderr": 0.03205649904851859, + "acc_norm": 0.41350210970464135, + "acc_norm_stderr": 0.03205649904851859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29335071707953064, + "acc_stderr": 0.011628520449582076, + "acc_norm": 0.29335071707953064, + "acc_norm_stderr": 0.011628520449582076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.033744993563193555, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.033744993563193555 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.01598359510181139, + "mc2": 0.4602391231259313, + "mc2_stderr": 0.015191570633369808 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama-2-ko-en-instruct-v1", + "model_sha": "aee07500d61a1d5d214cf0bc0040650957cf3da0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-dpo-13b-test3/result_2023-11-30 08:08:14.json b/42MARU/GenAI-llama2-ko-en-dpo-13b-test3/result_2023-11-30 08:08:14.json new file mode 100644 index 0000000000000000000000000000000000000000..230e3555a510960013fc3b9b4c895ff20d31ff66 --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-dpo-13b-test3/result_2023-11-30 08:08:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4112627986348123, + "acc_stderr": 0.014379441068522077, + "acc_norm": 0.45733788395904434, + "acc_norm_stderr": 0.014558106543924067 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43288189603664606, + "acc_stderr": 0.004944620712318274, + "acc_norm": 0.5816570404301932, + "acc_norm_stderr": 0.004922789247319874 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.01778403453499242, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.01778403453499242 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.0424463323835323, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.0424463323835323 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.035094383488796295, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.035094383488796295 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.040131241954243856, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.040131241954243856 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.02524277098712617, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.02524277098712617 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 
0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655812, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655812 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.03874102859818082, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.03874102859818082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399419, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399419 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5926605504587156, + "acc_stderr": 0.02106598624441288, + "acc_norm": 0.5926605504587156, + "acc_norm_stderr": 0.02106598624441288 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.02830457667314111, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.02830457667314111 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.01997742260022747, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.01997742260022747 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163907, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163907 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105303, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3774445893089961, + "acc_stderr": 0.012380680911165804, + "acc_norm": 0.3774445893089961, + "acc_norm_stderr": 0.012380680911165804 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.0163226441829605, + "mc2": 0.4756188079524156, + "mc2_stderr": 0.015396392654893808 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5100354191263282, + "acc_stderr": 0.01718689128689406, + "acc_norm": 0.5832349468713105, + "acc_norm_stderr": 0.01695048914610882 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-dpo-13b-test3", + "model_sha": "d70fdfed2e0b43ac6715ee5ec24801fd2bd5c25d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-dpo-13b-v1/result_2023-11-18 17:03:07.json b/42MARU/GenAI-llama2-ko-en-dpo-13b-v1/result_2023-11-18 17:03:07.json new file mode 100644 index 0000000000000000000000000000000000000000..ac212570307bbc2d57f23c894a8c2edd8248bf1b --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-dpo-13b-v1/result_2023-11-18 17:03:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938165, + "acc_norm": 0.45819112627986347, + "acc_norm_stderr": 0.014560220308714702 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42561242780322645, + "acc_stderr": 0.004934250390879782, + "acc_norm": 0.569308902609042, + "acc_norm_stderr": 0.004941609820763589 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5210727969348659, + "acc_stderr": 0.01786407678621291, + "acc_norm": 0.5210727969348659, + "acc_norm_stderr": 0.01786407678621291 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, 
+ "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540218, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540218 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489424, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830517, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830517 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.02807158890109185, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.02807158890109185 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.03032594578928611, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.03032594578928611 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794915 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523857, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523857 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413317, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413317 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + "acc_stderr": 0.021432956203453316, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.021432956203453316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.02807415894760066, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + 
"acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.01984828016840117, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.01984828016840117 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605593, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605593 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.02806499816704009, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.02806499816704009 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235922, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235922 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.318122555410691, + "acc_stderr": 0.011895407281104074, + "acc_norm": 0.318122555410691, + "acc_norm_stderr": 0.011895407281104074 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559693, + "mc2": 0.474366186048088, + "mc2_stderr": 0.01540967506791855 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.017189767032130817, + "acc_norm": 0.5525383707201889, + "acc_norm_stderr": 0.017095190301500574 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-dpo-13b-v1", + "model_sha": "13d027c0a2069284308f4992d67a202ac2e50b22", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-dpo-13b-v2/result_2023-11-19 10:36:38.json b/42MARU/GenAI-llama2-ko-en-dpo-13b-v2/result_2023-11-19 10:36:38.json new file mode 100644 index 0000000000000000000000000000000000000000..48002060015dc96a5a052b340d92cbd2be535dc3 --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-dpo-13b-v2/result_2023-11-19 10:36:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938165, + "acc_norm": 0.46075085324232085, + "acc_norm_stderr": 0.014566303676636588 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42640908185620396, + "acc_stderr": 0.004935439955031694, + "acc_norm": 0.5706034654451304, + "acc_norm_stderr": 0.0049397843114489855 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5197956577266922, + 
"acc_stderr": 0.017865944827291633, + "acc_norm": 0.5197956577266922, + "acc_norm_stderr": 0.017865944827291633 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894245, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894245 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.024503472557110946, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.024503472557110946 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.02807158890109185, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.02807158890109185 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.03158539157745636, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 
0.03158539157745636 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.03032594578928611, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.03032594578928611 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523857, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523857 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.026918645383239015, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.026918645383239015 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389177, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389177 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.01984828016840117, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.01984828016840117 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605593, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605593 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235922, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235922 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31747066492829207, + "acc_stderr": 0.011888892068809309, + "acc_norm": 0.31747066492829207, + "acc_norm_stderr": 0.011888892068809309 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.47439440606323957, + "mc2_stderr": 0.015414552807155835 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5088547815820543, + "acc_stderr": 0.01718765819933674, + "acc_norm": 0.5548996458087367, + "acc_norm_stderr": 0.017086417431005464 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-dpo-13b-v2", + "model_sha": "6fd9c176286458a9e802d0955a243f7b538c8e1c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-20B-v1/result_2023-11-05 16:07:53.json b/42MARU/GenAI-llama2-ko-en-instruct-20B-v1/result_2023-11-05 16:07:53.json new file mode 100644 index 0000000000000000000000000000000000000000..8f478826335ca6cced97ce2a5200b6394a8e496a --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-20B-v1/result_2023-11-05 16:07:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3984641638225256, + "acc_stderr": 0.014306946052735563, + "acc_norm": 0.4616040955631399, + "acc_norm_stderr": 0.01456824555029636 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42162915753833896, + "acc_stderr": 0.004928105880776079, + "acc_norm": 0.5677155945030871, + "acc_norm_stderr": 0.004943809330692697 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5491698595146871, + "acc_stderr": 0.01779329757269903, + "acc_norm": 0.5491698595146871, + "acc_norm_stderr": 0.01779329757269903 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916748, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916748 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + 
"acc_stderr": 0.033764582465095665, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.033764582465095665 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051622, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051622 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5024875621890548, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.5024875621890548, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5431192660550459, + "acc_stderr": 0.021357458785226224, + "acc_norm": 0.5431192660550459, + 
"acc_norm_stderr": 0.021357458785226224 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.02799672318063145, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.02799672318063145 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3937908496732026, + "acc_stderr": 0.01976621199107307, + "acc_norm": 0.3937908496732026, + "acc_norm_stderr": 0.01976621199107307 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265015, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265015 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29497206703910617, + "acc_stderr": 0.015251931579208185, + "acc_norm": 0.29497206703910617, + "acc_norm_stderr": 0.015251931579208185 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6919831223628692, + "acc_stderr": 0.030052389335605695, + "acc_norm": 0.6919831223628692, + "acc_norm_stderr": 0.030052389335605695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.012319403369564639, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564639 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070262, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070262 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.458694749783158, + 
"mc2_stderr": 0.015135220490705375 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45336481700118064, + "acc_stderr": 0.017115418225226862, + "acc_norm": 0.564344746162928, + "acc_norm_stderr": 0.017047415229476313 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-20B-v1", + "model_sha": "4de05113ecc02aa2da28893d8e2827912ebe0d20", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json b/42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json new file mode 100644 index 0000000000000000000000000000000000000000..1f0fdffbf4b5df08259c8a13f05ba920a05cd5fb --- /dev/null +++ 
b/42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3848122866894198, + "acc_stderr": 0.014218371065251095, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804243 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4190400318661621, + "acc_stderr": 0.0049239357498424945, + "acc_norm": 0.5560645289782912, + "acc_norm_stderr": 0.004958314114266494 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828061, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + 
"acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230172, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230172 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0242785680243077, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0242785680243077 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272436, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5559633027522936, + "acc_stderr": 0.021302621211654518, + "acc_norm": 0.5559633027522936, + "acc_norm_stderr": 0.021302621211654518 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225875, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225875 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.019576953122088833, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.019576953122088833 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260664, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260664 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280058, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280058 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741518, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 
0.012187773370741518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.44227632802507094, + "mc2_stderr": 0.015242459306682204 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5088547815820543, + "acc_stderr": 0.017187658199336743, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v2-13b", + 
"model_sha": "9f429309fc6b939d08c659ab4666f6e80324dcd1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v3-13B/result_2023-11-01 18:54:40.json b/42MARU/GenAI-llama2-ko-en-instruct-v3-13B/result_2023-11-01 18:54:40.json new file mode 100644 index 0000000000000000000000000000000000000000..a66de01a7aaa411a9c8ebe3fe34e77f99cbae6ca --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-v3-13B/result_2023-11-01 18:54:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180646, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4164509061939853, + "acc_stderr": 0.004919626380645517, + "acc_norm": 0.5536745668193587, + "acc_norm_stderr": 0.004960947388535101 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.038237270928823064, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.038237270928823064 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.017870847506081738, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.017870847506081738 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.0281739177617629, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.0281739177617629 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168284, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168284 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + 
"acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.030463656747340254, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.030463656747340254 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823017, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823017 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373056, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842424, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.02779476010500874, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.02779476010500874 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5229357798165137, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.5229357798165137, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089775, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409167, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409167 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.03027332507734575, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.03027332507734575 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + 
"acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3285528031290743, + "acc_stderr": 0.011996027247502912, + "acc_norm": 0.3285528031290743, + "acc_norm_stderr": 0.011996027247502912 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.035086373586305716, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.035086373586305716 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.01570210709062789, + "mc2": 0.4486611820923937, + "mc2_stderr": 0.015416976946375454 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190192, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.01705775370216029 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v3-13b", + "model_sha": "199c2113f09f153bce1ad7aac35e6e756a99b89b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v3-13B/result_2023-11-02 01:15:35.json b/42MARU/GenAI-llama2-ko-en-instruct-v3-13B/result_2023-11-02 01:15:35.json new file mode 100644 index 0000000000000000000000000000000000000000..4cbd5ed51a3e7af0d6c3d63fd38ee646065de7c5 --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-v3-13B/result_2023-11-02 01:15:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180646, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4164509061939853, + "acc_stderr": 0.004919626380645517, + "acc_norm": 0.5536745668193587, + "acc_norm_stderr": 0.004960947388535101 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.038237270928823064, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.038237270928823064 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.017870847506081738, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.017870847506081738 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.0281739177617629, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.0281739177617629 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168284, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168284 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 
0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.030463656747340254, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.030463656747340254 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823017, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823017 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373056, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842424, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 
0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.02779476010500874, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.02779476010500874 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5229357798165137, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.5229357798165137, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089775, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409167, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409167 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + 
"acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.03027332507734575, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.03027332507734575 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3285528031290743, + "acc_stderr": 0.011996027247502912, + "acc_norm": 0.3285528031290743, + "acc_norm_stderr": 0.011996027247502912 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.035086373586305716, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.035086373586305716 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.01570210709062789, + "mc2": 0.44866578973581106, + "mc2_stderr": 0.015416926437342405 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190192, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.01705775370216029 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v3-13B", + "model_sha": "199c2113f09f153bce1ad7aac35e6e756a99b89b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v4-13B/result_2023-11-09 09:34:14.json b/42MARU/GenAI-llama2-ko-en-instruct-v4-13B/result_2023-11-09 09:34:14.json new file mode 100644 index 0000000000000000000000000000000000000000..8e21dcbfd6f324033fc14a45506c2ee101f8548e --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-v4-13B/result_2023-11-09 09:34:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4087030716723549, + "acc_stderr": 0.014365750345427006, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.01455594976049644 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43168691495717987, + "acc_stderr": 0.0049429906231311166, + "acc_norm": 0.5795658235411273, + "acc_norm_stderr": 0.0049261984839487115 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.017769250583533246, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.017769250583533246 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.0424463323835323, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.0424463323835323 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.035029757994130085, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.035029757994130085 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 
0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272436, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399419, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399419 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.02110912813341391, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.02110912813341391 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.0200176292142131, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.0200176292142131 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697625, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697625 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 
0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02952009569768775, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02952009569768775 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105303, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3741851368970013, + "acc_stderr": 0.012359335618172063, + "acc_norm": 0.3741851368970013, + "acc_norm_stderr": 0.012359335618172063 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3243574051407589, + "mc1_stderr": 0.01638797677964794, + "mc2": 0.4753344144954286, + "mc2_stderr": 0.015470233894001158 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.500590318772137, + "acc_stderr": 0.01719034212344859, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v4-13B", + "model_sha": "fabf605d23d96e548908ffe9f0ad49dae01c46f8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v5-13B/result_2023-11-16 17:37:54.json b/42MARU/GenAI-llama2-ko-en-instruct-v5-13B/result_2023-11-16 17:37:54.json new file mode 100644 index 0000000000000000000000000000000000000000..28ab0ab7f6dd0de1fbe25a5d1aa8496e63929e49 --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-v5-13B/result_2023-11-16 17:37:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979272, + "acc_norm": 0.43600682593856654, + "acc_norm_stderr": 0.014491225699230918 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4221270663214499, + "acc_stderr": 0.00492889189587429, + "acc_norm": 0.5567616012746465, + "acc_norm_stderr": 0.004957524197900418 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633944, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633944 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479637, + "acc_norm": 0.39156626506024095, + 
"acc_norm_stderr": 0.03799857454479637 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.031918633744784645, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.031918633744784645 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.024784316942156367, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.024784316942156367 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849734, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + 
"acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377906, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160667, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924806, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924806 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012386, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012386 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983576, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.01202012819598575, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.01202012819598575 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4591418911312825, + "mc2_stderr": 0.015363002653584545 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.017187658199336736, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v5-13B", + "model_sha": "e625b2673e2a0839e7d3fc0f2a844e9966404678", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v6-13B/result_2023-11-27 02:12:26.json b/42MARU/GenAI-llama2-ko-en-instruct-v6-13B/result_2023-11-27 02:12:26.json new file mode 100644 index 0000000000000000000000000000000000000000..ca45303f7163cf60269b679775f5fff31b3374c7 --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-v6-13B/result_2023-11-27 02:12:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938215, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4272057359091814, + "acc_stderr": 0.004936616428922639, + "acc_norm": 0.5610436168094005, + "acc_norm_stderr": 0.004952454721934797 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5210727969348659, + "acc_stderr": 0.017864076786212903, + "acc_norm": 0.5210727969348659, + "acc_norm_stderr": 0.017864076786212903 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + 
"acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652629, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652629 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.031918633744784645, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.031918633744784645 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.02504919787604233, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.02504919787604233 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561056, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561056 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 
0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633345, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633345 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4421965317919075, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.4421965317919075, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583639, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583639 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.02833239748366427, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.02833239748366427 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114024, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114024 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.031546962856566295, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.031546962856566295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898428, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898428 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.011862561755715945, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.011862561755715945 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.01607750926613303, + "mc2": 0.45710797981768625, + "mc2_stderr": 0.015464643764155465 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46871310507674147, + "acc_stderr": 0.017156666859785456, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.017057753702160294 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v6-13B", + "model_sha": "f24326c48f4edb60bc3bdc186b65e0fcb9254c1e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v7-13B/result_2023-12-03 17:19:11.json b/42MARU/GenAI-llama2-ko-en-instruct-v7-13B/result_2023-12-03 17:19:11.json new file mode 100644 index 0000000000000000000000000000000000000000..5a1c4933b81421155b683d58e8a40826bdc4a3c5 --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-v7-13B/result_2023-12-03 17:19:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938213, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.014555949760496435 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42272455686118304, + "acc_stderr": 0.00492982833760698, + "acc_norm": 0.5592511451902011, + "acc_norm_stderr": 0.004954622308739005 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 
0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5146871008939975, + "acc_stderr": 0.017872248024429122, + "acc_norm": 0.5146871008939975, + "acc_norm_stderr": 0.017872248024429122 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863537, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863537 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.03979236637497411, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.03979236637497411 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38974358974358975, + "acc_stderr": 0.024726967886647074, + "acc_norm": 0.38974358974358975, + "acc_norm_stderr": 0.024726967886647074 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 
0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502737, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.03530235517334682, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.03530235517334682 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099522, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099522 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422708, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384486, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362233, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 
0.021431223617362233 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.028074158947600663, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.028074158947600663 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215923, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215923 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953195, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953195 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.02858270975389843, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.02858270975389843 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175364, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.011965311536571528, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.011965311536571528 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.4532384559135145, + 
"mc2_stderr": 0.015485047009493541 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5301062573789846, + "acc_norm_stderr": 0.017159163590170223 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v7-13B", + "model_sha": "40b2f1775ec5f92bfa8191fda6bb5f7c78564b3c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-platypus-13B-v2/result_2023-10-30 01:04:14.json b/42MARU/GenAI-llama2-ko-en-platypus-13B-v2/result_2023-10-30 01:04:14.json new file mode 100644 index 0000000000000000000000000000000000000000..fdfd90d8bd50e1fefe22f76e84d2924800b5f4e6 --- /dev/null +++ 
b/42MARU/GenAI-llama2-ko-en-platypus-13B-v2/result_2023-10-30 01:04:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3890784982935154, + "acc_stderr": 0.014247309976045607, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.01453714444428474 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42013543118900615, + "acc_stderr": 0.00492571700809971, + "acc_norm": 0.5487950607448715, + "acc_norm_stderr": 0.004965963647210315 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.01787469866749135, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.01787469866749135 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231015, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231015 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.03777798822748018, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.03777798822748018 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539753, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539753 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + 
"acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557845, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.030052580579557845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.025497532639609553, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.025497532639609553 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.037336266553835096, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.037336266553835096 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972585, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972585 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836185, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836185 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518754, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518754 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355435, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355435 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320203, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320203 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.030388051301678116, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.030388051301678116 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625166, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625166 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.011849234291459313, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.011849234291459313 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606785, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606785 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.03883565977956928, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.03883565977956928 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834562, + "mc2": 0.44689474709496685, + "mc2_stderr": 0.015256070107718848 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5478158205430933, + "acc_stderr": 0.017111567130916796, + "acc_norm": 0.5962219598583235, + "acc_norm_stderr": 0.016869031540298632 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-platypus-13B-v2", + "model_sha": 
"1b4eb6319be99c113d17778ce2737acffe2a0fee", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-platypus-13B/result_2023-10-29 00:31:00.json b/42MARU/GenAI-llama2-ko-en-platypus-13B/result_2023-10-29 00:31:00.json new file mode 100644 index 0000000000000000000000000000000000000000..1ee41f8f43825a21a7caba67c5d46af076066d7c --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-platypus-13B/result_2023-10-29 00:31:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633832 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4166500697072296, + "acc_stderr": 0.00491996282220832, + "acc_norm": 0.5524795857398924, + "acc_norm_stderr": 0.004962220512548352 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5351213282247765, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.5351213282247765, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255099, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255099 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266236, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 
0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042328, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042328 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150275, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150275 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02578787422095932, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02578787422095932 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.032979866484738336, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.032979866484738336 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.036812296333943194, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.036812296333943194 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.02691729617914911 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456602, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456602 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281335, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281335 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5522935779816514, + "acc_stderr": 0.02131975496242546, + "acc_norm": 0.5522935779816514, + "acc_norm_stderr": 0.02131975496242546 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.02852638345214264, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.02852638345214264 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281525, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281525 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000534, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.293854748603352, + "acc_stderr": 0.015235075776719616, + "acc_norm": 0.293854748603352, + "acc_norm_stderr": 0.015235075776719616 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.02725720260611495, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.02725720260611495 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.01202012819598576, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.01202012819598576 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4478444454695957, + "mc2_stderr": 0.015296142940086415 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5454545454545454, + "acc_stderr": 0.017119172208061504, + "acc_norm": 0.5938606847697757, + "acc_norm_stderr": 0.016884749503191396 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-platypus-13B", + "model_sha": "61d276d0715184790bae2979744f1ae7c0f451c0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json b/42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json new file mode 100644 index 0000000000000000000000000000000000000000..c2d8f20a5394134efba430fcd786264af3e7fc26 --- /dev/null +++ b/42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145678, + "acc_norm": 0.3839590443686007, + "acc_norm_stderr": 0.01421244498065189 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3975303724357698, + "acc_stderr": 0.004883871774350598, + "acc_norm": 0.5247958573989245, + "acc_norm_stderr": 0.004983641854351152 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3831417624521073, + "acc_stderr": 0.01738477419488563, + "acc_norm": 0.3831417624521073, + "acc_norm_stderr": 0.01738477419488563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.02937917046412482, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.02937917046412482 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.03777798822748017, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.03777798822748017 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.35691318327974275, + "acc_stderr": 0.027210420375934012, + "acc_norm": 0.35691318327974275, + "acc_norm_stderr": 0.027210420375934012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35858585858585856, + "acc_stderr": 0.034169036403915214, + "acc_norm": 0.35858585858585856, + "acc_norm_stderr": 0.034169036403915214 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.03618664819936245, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936245 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533953, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533953 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.021916957709213803, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.021916957709213803 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04668408033024932, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04668408033024932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733545, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042767, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042767 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.03248577511578401, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.03248577511578401 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3283582089552239, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.3283582089552239, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184756, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184756 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + 
"acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33236994219653176, + "acc_stderr": 0.025361168749688225, + "acc_norm": 0.33236994219653176, + "acc_norm_stderr": 0.025361168749688225 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33641975308641975, + "acc_stderr": 0.026289734945952926, + "acc_norm": 0.33641975308641975, + "acc_norm_stderr": 0.026289734945952926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32642487046632124, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.32642487046632124, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3174311926605505, + "acc_stderr": 0.019957152198460497, + "acc_norm": 0.3174311926605505, + "acc_norm_stderr": 0.019957152198460497 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.02736359328468495, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.02736359328468495 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.017917974069594726, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.017917974069594726 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605586, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605586 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + 
}, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.028666857790274648, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.028666857790274648 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301847, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.031219569445301847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27249022164276404, + "acc_stderr": 0.01137165829431153, + "acc_norm": 0.27249022164276404, + "acc_norm_stderr": 0.01137165829431153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.031145570659486782, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.031145570659486782 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.39805148377575406, + "mc2_stderr": 0.015027401787198838 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2396694214876033, + "acc_stderr": 0.014676495332267253, + "acc_norm": 0.31286894923258557, + "acc_norm_stderr": 0.015941010118302654 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/llama-2-ko-7b-instruct", + "model_sha": "3c590472282b5de4c76d846153db5f41b82c1b62", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/llama-2-ko-7b-instruction-v3/result_2023-10-01 18:41:33.json b/42MARU/llama-2-ko-7b-instruction-v3/result_2023-10-01 18:41:33.json new file mode 100644 index 0000000000000000000000000000000000000000..17bf35f8ccfc589b128222e311f97daf61999735 --- /dev/null +++ b/42MARU/llama-2-ko-7b-instruction-v3/result_2023-10-01 18:41:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635474, + "acc_norm": 0.386518771331058, + "acc_norm_stderr": 0.014230084761910474 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3835889265086636, + "acc_stderr": 0.00485265887677539, + "acc_norm": 0.5022903804023103, + "acc_norm_stderr": 0.004989729059957435 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.03743979825926401, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.03743979825926401 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.04453254836326466, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.04453254836326466 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.367816091954023, + "acc_stderr": 0.01724382889184626, + "acc_norm": 0.367816091954023, + "acc_norm_stderr": 0.01724382889184626 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996795, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996795 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.02951319662553935, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.02951319662553935 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031024, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031024 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 
0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.03304205087813653, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.03304205087813653 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2794871794871795, + "acc_stderr": 0.022752388839776823, + "acc_norm": 0.2794871794871795, + "acc_norm_stderr": 0.022752388839776823 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22660098522167488, + "acc_stderr": 0.02945486383529298, + "acc_norm": 0.22660098522167488, + "acc_norm_stderr": 0.02945486383529298 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3225806451612903, + "acc_stderr": 0.026593084516572267, + "acc_norm": 0.3225806451612903, + "acc_norm_stderr": 0.026593084516572267 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5, + "acc_stderr": 0.03275608910402091, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03275608910402091 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3471698113207547, + "acc_stderr": 0.029300101705549652, + "acc_norm": 0.3471698113207547, + "acc_norm_stderr": 0.029300101705549652 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766118, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766118 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.03336767086567977, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.03336767086567977 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39800995024875624, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.39800995024875624, + "acc_norm_stderr": 0.034611994290400135 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.03414014007044036, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.03414014007044036 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554859, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554859 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.38439306358381503, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.38439306358381503, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470022, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470022 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.026725868809100793, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.026725868809100793 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.03458816042181005, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.03458816042181005 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3394495412844037, + "acc_stderr": 0.02030210934266235, + "acc_norm": 0.3394495412844037, + "acc_norm_stderr": 0.02030210934266235 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159607, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.027582811415159607 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.019117213911495165, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.019117213911495165 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + "harness|ko_mmlu_machine_learning|5": { + 
"acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578728, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578728 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510927, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510927 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553977, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553977 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776125, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 0.03055531675557364, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.03055531675557364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4767932489451477, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.4767932489451477, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3089960886571056, + "acc_stderr": 0.01180172977723925, + "acc_norm": 0.3089960886571056, + "acc_norm_stderr": 0.01180172977723925 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.032702871814820816, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.032702871814820816 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.0364620496325381, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.0364620496325381 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.01517698502770769, + "mc2": 0.38056097212603235, + "mc2_stderr": 0.014936929596682727 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21605667060212513, + "acc_stderr": 0.014149496716043137, + "acc_norm": 0.29279811097992914, + "acc_norm_stderr": 0.015644823205401337 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/llama-2-ko-7b-instruction-v3", + "model_sha": "c0fea9cb31d4ae90aa2ed048f774a9000341b538", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/polyglot-ko-12.8b-instruct/result_2023-09-27 21:10:18.json b/42MARU/polyglot-ko-12.8b-instruct/result_2023-09-27 21:10:18.json new file mode 100644 index 0000000000000000000000000000000000000000..4db9a44cf2c11e6ee23ccc99e1d58f04a6bc6c12 --- /dev/null +++ b/42MARU/polyglot-ko-12.8b-instruct/result_2023-09-27 21:10:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3046075085324232, + "acc_stderr": 0.013449522109932492, + "acc_norm": 0.363481228668942, + "acc_norm_stderr": 0.014056207319068282 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3981278629755029, + "acc_stderr": 0.0048851164655502755, + "acc_norm": 0.5159330810595499, + "acc_norm_stderr": 0.004987247325495624 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150193, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150193 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.039154506304142495, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.039154506304142495 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2297872340425532, + "acc_stderr": 0.02750175294441242, + "acc_norm": 0.2297872340425532, + "acc_norm_stderr": 0.02750175294441242 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064536, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064536 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893944, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893944 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.1031390134529148, + "acc_stderr": 0.020412564289839272, + "acc_norm": 0.1031390134529148, + "acc_norm_stderr": 0.020412564289839272 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.40404040404040403, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.40404040404040403, + "acc_norm_stderr": 0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.03921545312467122, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.03921545312467122 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.18907563025210083, + "acc_stderr": 0.02543511943810536, + "acc_norm": 0.18907563025210083, + "acc_norm_stderr": 0.02543511943810536 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.020932445774463175, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.020932445774463175 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1921182266009852, + "acc_stderr": 0.027719315709614778, + "acc_norm": 0.1921182266009852, + "acc_norm_stderr": 0.027719315709614778 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332204, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332204 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.29056603773584905, + "acc_stderr": 0.027943219989337156, + "acc_norm": 0.29056603773584905, + "acc_norm_stderr": 0.027943219989337156 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.19402985074626866, + "acc_stderr": 0.027962677604768914, + "acc_norm": 0.19402985074626866, + "acc_norm_stderr": 0.027962677604768914 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641143, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641143 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240018, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.023788583551658537, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.023788583551658537 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178253, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178253 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3376146788990826, + "acc_stderr": 0.02027526598663891, + "acc_norm": 0.3376146788990826, + "acc_norm_stderr": 0.02027526598663891 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238126, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.025553169991826524, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.025553169991826524 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516302, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516302 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, 
+ "acc_stderr": 0.03459777606810537, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810537 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.01774089950917779, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.01774089950917779 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.024847921358063962, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.024847921358063962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372937, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24472573839662448, + "acc_stderr": 0.027985699387036416, + "acc_norm": 0.24472573839662448, + "acc_norm_stderr": 0.027985699387036416 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2588005215123859, + "acc_stderr": 0.011186109046564608, + "acc_norm": 0.2588005215123859, + "acc_norm_stderr": 0.011186109046564608 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.031660096793998116, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.031660096793998116 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2, + "acc_stderr": 0.03123475237772118, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03123475237772118 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627887, + "mc2": 0.4515720476496737, + "mc2_stderr": 0.015493161984611252 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2680047225501771, + "acc_stderr": 0.015227905796335147, + "acc_norm": 0.3707201889020071, + "acc_norm_stderr": 0.016605801289212598 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/polyglot-ko-12.8b-instruct", + "model_sha": "a8354bcedc167e8e1f7dac8a347bf4b61d9c9bf0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/sitebunny-13b/result_2023-09-27 08:17:31.json b/42MARU/sitebunny-13b/result_2023-09-27 08:17:31.json new file mode 100644 index 0000000000000000000000000000000000000000..a0541d1d294503564b350859ce432296bf101ded --- /dev/null +++ b/42MARU/sitebunny-13b/result_2023-09-27 08:17:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3643344709897611, + "acc_stderr": 0.014063260279882417, + "acc_norm": 0.4112627986348123, + "acc_norm_stderr": 0.014379441068522084 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3732324238199562, + "acc_stderr": 0.004826746160830189, + "acc_norm": 0.4751045608444533, + "acc_norm_stderr": 0.004983592410934169 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + 
"acc": 0.4878671775223499, + "acc_stderr": 0.017874698667491355, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.017874698667491355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894245, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894245 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.03166098891888078, + "acc_norm": 0.6282051282051282, + 
"acc_norm_stderr": 0.03166098891888078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683526, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683526 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.037124548537213684, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.037124548537213684 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.028408302020332687, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 
0.028408302020332687 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797609, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.03878139888797609 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.01927099870822398, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.01927099870822398 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.040073418097558065, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.040073418097558065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.031546962856566295, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.031546962856566295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3106145251396648, + "acc_stderr": 0.015476515438005566, + "acc_norm": 0.3106145251396648, + "acc_norm_stderr": 0.015476515438005566 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32333767926988266, + "acc_stderr": 0.011946565758447202, + "acc_norm": 0.32333767926988266, + "acc_norm_stderr": 0.011946565758447202 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35006119951040393, + "mc1_stderr": 0.01669794942015103, + "mc2": 0.5148844380994511, + "mc2_stderr": 0.015947695748354234 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42857142857142855, + "acc_stderr": 0.017014038119297473, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.0170725258755631 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/sitebunny-13b", + "model_sha": "15c8578d2be688d6b03ed2076658865bb8752673", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42dot/42dot_LLM-PLM-1.3B/result_2023-10-18 01:46:47.json b/42dot/42dot_LLM-PLM-1.3B/result_2023-10-18 01:46:47.json new file mode 100644 index 0000000000000000000000000000000000000000..332095c02e6b1b6bfc3e63c9bb110307e1733c01 --- /dev/null +++ b/42dot/42dot_LLM-PLM-1.3B/result_2023-10-18 01:46:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2636518771331058, + "acc_stderr": 0.01287592915129705, + "acc_norm": 0.32593856655290104, + "acc_norm_stderr": 0.013697432466693242 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3563035251941844, + "acc_stderr": 0.004779276329704052, + "acc_norm": 0.4473212507468632, + 
"acc_norm_stderr": 0.004962010338226348 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.22094508301404853, + "acc_stderr": 0.014836205167333574, + "acc_norm": 0.22094508301404853, + "acc_norm_stderr": 0.014836205167333574 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.0335567721631314, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.0335567721631314 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.02951319662553935, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.02951319662553935 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824664, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824664 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2347266881028939, + "acc_stderr": 0.024071805887677045, + "acc_norm": 0.2347266881028939, + "acc_norm_stderr": 0.024071805887677045 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2242152466367713, + "acc_stderr": 0.027991534258519527, + "acc_norm": 0.2242152466367713, + "acc_norm_stderr": 0.027991534258519527 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124484, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124484 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.21379310344827587, + "acc_stderr": 0.034165204477475494, + "acc_norm": 0.21379310344827587, + "acc_norm_stderr": 0.034165204477475494 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886835, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886835 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.02435958146539698, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.02435958146539698 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594525, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594525 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20085470085470086, + "acc_stderr": 0.02624677294689047, + "acc_norm": 0.20085470085470086, + "acc_norm_stderr": 0.02624677294689047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.026880647889051985, + "acc_norm": 0.25660377358490566, + "acc_norm_stderr": 0.026880647889051985 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 0.03220024104534205, + "acc_norm": 0.2935323383084577, + "acc_norm_stderr": 0.03220024104534205 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415426, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415426 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.022497230190967547, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.022497230190967547 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.025171041915309684, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.025171041915309684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.03458816042181006, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.03458816042181006 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24954128440366974, + "acc_stderr": 0.018553897629501614, + "acc_norm": 0.24954128440366974, + "acc_norm_stderr": 0.018553897629501614 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27346938775510204, + "acc_stderr": 0.02853556033712845, + "acc_norm": 0.27346938775510204, + "acc_norm_stderr": 0.02853556033712845 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965833, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965833 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2561929595827901, + "acc_stderr": 0.011149173153110583, + "acc_norm": 0.2561929595827901, + "acc_norm_stderr": 0.011149173153110583 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 
0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715006, + "mc2": 0.40367736123530334, + "mc2_stderr": 0.014824402657107816 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2396694214876033, + "acc_stderr": 0.014676495332267253, + "acc_norm": 0.36835891381345925, + "acc_norm_stderr": 0.016583858982639074 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42dot/42dot_LLM-PLM-1.3B", + "model_sha": "a72bf57eb02cd4ea4388a344b4a5893aa95698da", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42dot/42dot_LLM-SFT-1.3B/result_2023-10-18 01:47:03.json b/42dot/42dot_LLM-SFT-1.3B/result_2023-10-18 01:47:03.json new file mode 100644 index 
0000000000000000000000000000000000000000..e28633aafb785215be2332ffde5050034d091502 --- /dev/null +++ b/42dot/42dot_LLM-SFT-1.3B/result_2023-10-18 01:47:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28242320819112626, + "acc_stderr": 0.01315545688409722, + "acc_norm": 0.35494880546075086, + "acc_norm_stderr": 0.013983036904094094 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36317466640111534, + "acc_stderr": 0.004799317209902023, + "acc_norm": 0.4613622784305915, + "acc_norm_stderr": 0.004974860878464429 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.13592233009708737, + "acc_stderr": 0.033932957297610124, + "acc_norm": 0.13592233009708737, + "acc_norm_stderr": 0.033932957297610124 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150193, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150193 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386698, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386698 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.034605799075530276, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.034605799075530276 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2604501607717042, + "acc_stderr": 0.024926723224845543, + "acc_norm": 0.2604501607717042, + "acc_norm_stderr": 0.024926723224845543 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.242152466367713, + "acc_stderr": 0.028751392398694755, + "acc_norm": 0.242152466367713, + "acc_norm_stderr": 0.028751392398694755 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596918, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596918 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.02985751567338641, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.02985751567338641 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.03618664819936246, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936246 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176892, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176892 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.021362027725222728, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.021362027725222728 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946336, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946336 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3034188034188034, + "acc_stderr": 0.030118210106942652, + "acc_norm": 0.3034188034188034, + "acc_norm_stderr": 0.030118210106942652 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2037735849056604, + "acc_stderr": 0.02479078450177541, + "acc_norm": 0.2037735849056604, + "acc_norm_stderr": 0.02479078450177541 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275794, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275794 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.035118075718047245, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.035118075718047245 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.18497109826589594, + "acc_stderr": 0.029605623981771204, + "acc_norm": 0.18497109826589594, + "acc_norm_stderr": 0.029605623981771204 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071128, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.025171041915309684, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.025171041915309684 + }, + "harness|ko_mmlu_college_mathematics|5": { + 
"acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803644, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803644 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796624, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796624 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.024739981355113592, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.024739981355113592 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.20394736842105263, + "acc_stderr": 0.0327900040631005, + "acc_norm": 0.20394736842105263, + "acc_norm_stderr": 0.0327900040631005 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.017704531653250075, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.017704531653250075 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729903, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729903 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802747, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802747 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.25, + "acc_stderr": 0.026303648393696036, + "acc_norm": 0.25, + "acc_norm_stderr": 0.026303648393696036 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866764, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.28270042194092826, + "acc_stderr": 0.029312814153955914, + "acc_norm": 0.28270042194092826, + "acc_norm_stderr": 0.029312814153955914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2503259452411995, + "acc_stderr": 
0.011064151027165438, + "acc_norm": 0.2503259452411995, + "acc_norm_stderr": 0.011064151027165438 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603488, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603488 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.43765472485909873, + "mc2_stderr": 0.015405588178148114 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2550177095631641, + "acc_stderr": 0.014985559533428578, + "acc_norm": 0.3754427390791027, + "acc_norm_stderr": 0.016648411589511095 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "42dot/42dot_LLM-SFT-1.3B", + "model_sha": "2dadd4492f0b27c302d8a5518003fa6045e32a8a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/PACK-13b-v1.0/result_2023-12-07 02:16:32.json b/AIFT/PACK-13b-v1.0/result_2023-12-07 02:16:32.json new file mode 100644 index 0000000000000000000000000000000000000000..0ee8c405b9da1af281d3a498af38cc4f44a04171 --- /dev/null +++ b/AIFT/PACK-13b-v1.0/result_2023-12-07 02:16:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3378839590443686, + "acc_stderr": 0.01382204792228351, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 0.01415063143511173 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37044413463453496, + "acc_stderr": 0.004819367172685971, + "acc_norm": 0.4788886675960964, + "acc_norm_stderr": 0.004985331652408348 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49169859514687103, + "acc_stderr": 0.017877498991072008, + "acc_norm": 0.49169859514687103, + "acc_norm_stderr": 0.017877498991072008 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, 
+ "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.02375292871211213, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.02375292871211213 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3950617283950617, + "acc_stderr": 0.02720111766692566, + "acc_norm": 0.3950617283950617, + "acc_norm_stderr": 0.02720111766692566 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3963302752293578, + "acc_stderr": 0.020971469947900525, + "acc_norm": 0.3963302752293578, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706207, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706207 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29044117647058826, + "acc_stderr": 0.027576468622740505, + "acc_norm": 0.29044117647058826, + "acc_norm_stderr": 0.027576468622740505 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.031557828165561644, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.031557828165561644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3200782268578879, + "acc_stderr": 0.011914791947638522, + "acc_norm": 0.3200782268578879, + "acc_norm_stderr": 0.011914791947638522 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.034107853389047184, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.034107853389047184 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087312, + "mc2": 0.4274629100267272, + "mc2_stderr": 0.015462888327553083 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3482880755608028, + "acc_stderr": 0.016379926739148044, + "acc_norm": 0.4132231404958678, + "acc_norm_stderr": 0.016929480234495232 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/PACK-13b-v1.0", + "model_sha": "27f7b1eb3d926034aa90feb9ebc31788182046dd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/PACK-13b-v1.1/result_2023-12-11 08:43:39.json b/AIFT/PACK-13b-v1.1/result_2023-12-11 08:43:39.json new file mode 100644 index 0000000000000000000000000000000000000000..17592aca2540f47dfceeb80a9c6e6f50e4b5bf12 --- /dev/null +++ b/AIFT/PACK-13b-v1.1/result_2023-12-11 08:43:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3361774744027304, + "acc_stderr": 0.013804855026205761, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.014194389086685261 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3724357697669787, + "acc_stderr": 0.004824655406075561, + "acc_norm": 0.48078072097191793, + "acc_norm_stderr": 0.004986093791041656 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4891443167305236, + "acc_stderr": 0.017875748840242414, + "acc_norm": 0.4891443167305236, + "acc_norm_stderr": 0.017875748840242414 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.037262143543224144, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.037262143543224144 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + 
"acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.02435958146539696, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.02435958146539696 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.029582245128384296, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.029582245128384296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 
0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.027237415094592474, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.027237415094592474 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3963302752293578, + "acc_stderr": 0.02097146994790053, + "acc_norm": 0.3963302752293578, + "acc_norm_stderr": 0.02097146994790053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02791405551046802, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02791405551046802 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.01885008469646872, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.01885008469646872 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169945, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169945 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.03070137211151092, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.03070137211151092 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + 
"acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144686, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144686 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.031557828165561644, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.031557828165561644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4936708860759494, + "acc_stderr": 0.032544620107678585, + "acc_norm": 0.4936708860759494, + "acc_norm_stderr": 0.032544620107678585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30182529335071706, + "acc_stderr": 0.01172435051810589, + "acc_norm": 0.30182529335071706, + "acc_norm_stderr": 0.01172435051810589 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.03402272044340704, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.03402272044340704 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087307, + "mc2": 0.42195295057052135, + "mc2_stderr": 0.015423294021851608 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3317591499409681, + "acc_stderr": 0.016187984642157312, + "acc_norm": 0.3955135773317591, + "acc_norm_stderr": 0.01681081590220604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/PACK-13b-v1.1", + "model_sha": "a547563032d1b762d80a80959f9b00aefab44eb5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/aift-llama2-koen-instruct-dpo-v1.01/result_2023-12-14 06:44:33.json b/AIFT/aift-llama2-koen-instruct-dpo-v1.01/result_2023-12-14 06:44:33.json new file mode 100644 index 0000000000000000000000000000000000000000..60c7c942b96270582d72f0e5a659a36df87535f8 --- /dev/null +++ b/AIFT/aift-llama2-koen-instruct-dpo-v1.01/result_2023-12-14 06:44:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4069965870307167, + "acc_stderr": 0.014356399418009124, + "acc_norm": 0.46245733788395904, + "acc_norm_stderr": 0.01457014449507558 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4193387771360287, + "acc_stderr": 0.00492442401807367, + "acc_norm": 0.5768771161123282, + "acc_norm_stderr": 0.004930448527146668 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.01776925058353325, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.01776925058353325 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168862, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168862 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 
0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.032284106267163895, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.024939313906940777, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.024939313906940777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413865, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413865 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 
0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.026918645383239015, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.026918645383239015 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833935, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833935 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147124, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147124 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490437, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490437 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281515, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966346, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966346 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003203, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003203 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789848, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789848 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34028683181225555, + "acc_stderr": 0.012101217610223782, + "acc_norm": 0.34028683181225555, + "acc_norm_stderr": 0.012101217610223782 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.01510240479735965, + "mc2": 0.40156672902861484, + "mc2_stderr": 0.015311585666350696 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3754427390791027, + "acc_stderr": 0.01664841158951109, + "acc_norm": 0.38134592680047225, + "acc_norm_stderr": 0.016699301768828084 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/aift-llama2-koen-instruct-dpo-v1.01", + "model_sha": "1c1e407910427042580cb11ad8569567c769cf49", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/aift-llama2-koen-instruct-dpo-v1.02/result_2023-12-15 03:06:44.json b/AIFT/aift-llama2-koen-instruct-dpo-v1.02/result_2023-12-15 03:06:44.json new file mode 100644 index 0000000000000000000000000000000000000000..d45ae087804cadf720b98e0a4eae4d53910a5d8c --- /dev/null +++ b/AIFT/aift-llama2-koen-instruct-dpo-v1.02/result_2023-12-15 03:06:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4052901023890785, + "acc_stderr": 0.014346869060229327, + "acc_norm": 0.4658703071672355, + "acc_norm_stderr": 0.014577311315231097 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4192391953794065, + "acc_stderr": 0.004924261467934422, + "acc_norm": 0.5763792073292173, + "acc_norm_stderr": 0.004931219148182245 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.032025630761017346, + "acc_norm": 0.4, + "acc_norm_stderr": 0.032025630761017346 + }, 
+ "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056127, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056127 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734026, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734026 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844086, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844086 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523857, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523857 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.02687408588351835, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.02687408588351835 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.0278074900442762 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.021162420048273515, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.021162420048273515 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.02852638345214263, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.02852638345214263 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024113, 
+ "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024113 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2670391061452514, + "acc_stderr": 0.014796502622562546, + "acc_norm": 0.2670391061452514, + "acc_norm_stderr": 0.014796502622562546 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280055, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280055 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34028683181225555, + "acc_stderr": 0.01210121761022378, + "acc_norm": 0.34028683181225555, + "acc_norm_stderr": 0.01210121761022378 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.01494881267906214, + "mc2": 0.39165785820787247, + "mc2_stderr": 0.015096702357183963 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3577331759149941, + "acc_stderr": 0.016479808935749976, + "acc_norm": 0.36481700118063753, + "acc_norm_stderr": 0.01655014433704659 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/aift-llama2-koen-instruct-dpo-v1.02", + "model_sha": "9e9887d8579e1d19943d9d10f0d340620328c852", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/aift-llama2-koen-instruct-v1.0/result_2023-12-14 00:45:21.json b/AIFT/aift-llama2-koen-instruct-v1.0/result_2023-12-14 00:45:21.json new file mode 100644 index 0000000000000000000000000000000000000000..7ddee52d0b12d08952b999bb8ef9a4801bd7f030 --- /dev/null +++ b/AIFT/aift-llama2-koen-instruct-v1.0/result_2023-12-14 00:45:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3771331058020478, + "acc_stderr": 0.0141633668961926, + "acc_norm": 0.4351535836177474, + "acc_norm_stderr": 0.01448798619718605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4151563433578968, + "acc_stderr": 0.004917419367766031, + "acc_norm": 0.5669189404501095, + "acc_norm_stderr": 0.004944889545497955 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5491698595146871, + "acc_stderr": 0.017793297572699034, + "acc_norm": 0.5491698595146871, + "acc_norm_stderr": 0.017793297572699034 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 
0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562804, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562804 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.4188679245283019, + 
"acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.025348097468097856, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.025348097468097856 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.02345603738398202, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.02345603738398202 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282532 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5596330275229358, + "acc_stderr": 0.021284310623761536, + "acc_norm": 0.5596330275229358, + "acc_norm_stderr": 0.021284310623761536 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401147, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401147 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101376, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016643, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016643 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228563, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228563 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.39725650408832863, + "mc2_stderr": 0.01469261681765968 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4179456906729634, + "acc_stderr": 0.016957292005279713, + "acc_norm": 0.4817001180637544, + "acc_norm_stderr": 0.01717883663917776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/aift-llama2-koen-instruct-v1.0", + "model_sha": "54a5a30188cba6af653f20df22ff393472f0e161", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/aift-llama2-koen-instruct-v1.1-dpo-test1/result_2023-12-18 03:42:48.json b/AIFT/aift-llama2-koen-instruct-v1.1-dpo-test1/result_2023-12-18 03:42:48.json new file mode 100644 index 0000000000000000000000000000000000000000..b1c83909ec7cc53c408c356a42a614b946e2e2e7 --- /dev/null +++ b/AIFT/aift-llama2-koen-instruct-v1.1-dpo-test1/result_2023-12-18 03:42:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979279, + "acc_norm": 0.44112627986348124, + "acc_norm_stderr": 0.014509747749064664 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41943835889265085, + "acc_stderr": 0.004924586362301652, + "acc_norm": 0.5719976100378411, + "acc_norm_stderr": 0.004937779821908573 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 
0.038268824176603704, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.01776925058353325, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.01776925058353325 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.032422250271150074, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.032422250271150074 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.02489047176993815, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.02489047176993815 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 
0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.02843453315268187, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268187 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766107, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766107 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5651376146788991, + "acc_stderr": 0.021254631465609273, + "acc_norm": 0.5651376146788991, + "acc_norm_stderr": 0.021254631465609273 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392869, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392869 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.019780465954777515, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.019780465954777515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101376, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.01459362092321074, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.01459362092321074 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254167, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254167 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228563, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228563 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367994, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367994 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2460220318237454, + "mc1_stderr": 0.015077219200662574, + "mc2": 0.40225459810500935, + "mc2_stderr": 0.01478258523910622 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.38488783943329397, + "acc_stderr": 0.016728579701498665, + "acc_norm": 0.4427390791027155, + "acc_norm_stderr": 0.017077254131556217 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/aift-llama2-koen-instruct-v1.1-dpo-test1", + "model_sha": "a416328b862669edfe25be6c305bc9f5ccc4d727", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/aift-llama2-koen-instruct-v1.1/result_2023-12-15 09:57:09.json b/AIFT/aift-llama2-koen-instruct-v1.1/result_2023-12-15 09:57:09.json new file mode 100644 index 0000000000000000000000000000000000000000..4937eda26f7b6aedf7bf650410de8b0f1b3f82ff --- /dev/null +++ b/AIFT/aift-llama2-koen-instruct-v1.1/result_2023-12-15 
09:57:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979279, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804243 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41894045010953995, + "acc_stderr": 0.004923772581848503, + "acc_norm": 0.5714997012547302, + "acc_norm_stderr": 0.004938500303990289 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.01776925058353325, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.01776925058353325 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564584, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564584 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177476, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177476 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.032422250271150074, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.032422250271150074 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767762, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767762 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.02843453315268187, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268187 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275798, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275798 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.037242495958177295, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.0278074900442762 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5614678899082569, + "acc_stderr": 0.021274713073954562, + "acc_norm": 0.5614678899082569, + "acc_norm_stderr": 0.021274713073954562 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292535, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292535 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.01984828016840115, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.01984828016840115 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.01465578083749773, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.01465578083749773 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335314, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34810951760104303, + "acc_stderr": 0.0121667389936982, + "acc_norm": 0.34810951760104303, + "acc_norm_stderr": 0.0121667389936982 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + 
"acc_stderr": 0.03503235296367994, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367994 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.4028203603366851, + "mc2_stderr": 0.01477432836961688 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38961038961038963, + "acc_stderr": 0.0167661616718935, + "acc_norm": 0.45218417945690675, + "acc_norm_stderr": 0.017111567130916782 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/aift-llama2-koen-instruct-v1.1", + "model_sha": "202c3e3df0c4a321503df8d4c78da213f1ae5475", + "model_dtype": "torch.float16", + "lighteval_sha": 
"", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/aift-llama2-koen-instruct-v1.2/result_2023-12-16 03:29:50.json b/AIFT/aift-llama2-koen-instruct-v1.2/result_2023-12-16 03:29:50.json new file mode 100644 index 0000000000000000000000000000000000000000..2c1ce2534191f83e655a9ab08f839116114ce27e --- /dev/null +++ b/AIFT/aift-llama2-koen-instruct-v1.2/result_2023-12-16 03:29:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38310580204778155, + "acc_stderr": 0.01420647266167288, + "acc_norm": 0.4453924914675768, + "acc_norm_stderr": 0.014523987638344074 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42113124875522806, + "acc_stderr": 0.004927314729433555, + "acc_norm": 0.578370842461661, + "acc_norm_stderr": 0.004928105880776078 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.038237270928823064, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.038237270928823064 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5644955300127714, + "acc_stderr": 0.017730589927926588, + "acc_norm": 0.5644955300127714, + "acc_norm_stderr": 0.017730589927926588 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400352, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400352 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.028386198084177687, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.028386198084177687 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 
0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.024720713193952148, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.024720713193952148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.038818912133343826, + "acc_norm": 0.4233128834355828, + 
"acc_norm_stderr": 0.038818912133343826 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.02123336503031956, + "acc_norm": 0.5688073394495413, + "acc_norm_stderr": 0.02123336503031956 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292535, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292535 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924806, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2681564245810056, + "acc_stderr": 0.014816119635317, + "acc_norm": 0.2681564245810056, + "acc_norm_stderr": 0.014816119635317 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396587, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 
+ }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35658409387222945, + "acc_stderr": 0.012233642989273886, + "acc_norm": 0.35658409387222945, + "acc_norm_stderr": 0.012233642989273886 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299946, + "mc2": 0.4052899642454083, + "mc2_stderr": 0.014924042516908636 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38488783943329397, + "acc_stderr": 0.016728579701498672, + "acc_norm": 0.4510035419126328, + "acc_norm_stderr": 0.017107618859549357 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/aift-llama2-koen-instruct-v1.2", + "model_sha": "95f3e7cce5bebe90ac4ff8f07597be444e7e1a9e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIdenU/LLAMA-2-13b-ko-Y24-DPO_v0.1/result_2023-12-18 01:34:17.json b/AIdenU/LLAMA-2-13b-ko-Y24-DPO_v0.1/result_2023-12-18 01:34:17.json new file mode 100644 index 0000000000000000000000000000000000000000..b9dee46b7c30f941ea2f8e573cc4809803eabe0d --- /dev/null +++ b/AIdenU/LLAMA-2-13b-ko-Y24-DPO_v0.1/result_2023-12-18 01:34:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.013621696119173297, + "acc_norm": 0.3779863481228669, + "acc_norm_stderr": 0.014169664520303103 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36446922923720376, + "acc_stderr": 0.004802974070507201, + "acc_norm": 0.46883091017725553, + "acc_norm_stderr": 0.004980076707392429 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.03786720706234215, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.03786720706234215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4891443167305236, + "acc_stderr": 0.01787574884024242, + "acc_norm": 0.4891443167305236, + "acc_norm_stderr": 0.01787574884024242 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956278, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956278 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416828, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416828 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 
0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985754, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985754 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.031937057262002924, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.031937057262002924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.030242233800854498, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.030242233800854498 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276612, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276612 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02256989707491841, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491841 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 
0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.0272725828498398, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.0272725828498398 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.0453781535493939, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.0453781535493939 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43302752293577984, + "acc_stderr": 0.021244146569074345, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.021244146569074345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423556, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423556 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.018875682938069436, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.018875682938069436 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.02746470844202212, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.02746470844202212 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.03154696285656628, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.03154696285656628 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335317, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.032498227183013026, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.032498227183013026 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.011773980329380731, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.011773980329380731 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2460220318237454, + "mc1_stderr": 0.015077219200662568, + "mc2": 0.4153514851890886, + "mc2_stderr": 0.01500188114852866 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.0169835060795776, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIdenU/LLAMA-2-13b-ko-Y24-DPO_v0.1", + "model_sha": "10c4f59aa0a45a331f9a3288f05daa29d9dc79df", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIdenU/Mistral-7b-ko-Y24-DPO_v0.1/result_2023-12-21 04:18:43.json b/AIdenU/Mistral-7b-ko-Y24-DPO_v0.1/result_2023-12-21 04:18:43.json new file mode 100644 index 0000000000000000000000000000000000000000..bebfdb58d26165570cb00af0fca9e60b9a2d1a1f --- /dev/null +++ b/AIdenU/Mistral-7b-ko-Y24-DPO_v0.1/result_2023-12-21 04:18:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35665529010238906, + "acc_stderr": 0.013998056902620196, + "acc_norm": 0.4052901023890785, + "acc_norm_stderr": 0.014346869060229328 + }, + "harness|ko_hellaswag|10": { + "acc": 0.377912766381199, + "acc_stderr": 0.004838747305783345, + "acc_norm": 0.49153555068711413, + "acc_norm_stderr": 0.004989066355449555 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5006385696040868, + "acc_stderr": 0.017879948914431697, + "acc_norm": 0.5006385696040868, + "acc_norm_stderr": 0.017879948914431697 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.02836504154256457, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.02836504154256457 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 
0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539743, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.028447965476231022, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.028447965476231022 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622841, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622841 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.024870815251057093, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.024870815251057093 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.02686462436675664, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.02686462436675664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.03919415545048409, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048409 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5137614678899083, + "acc_stderr": 0.021429202089874075, + "acc_norm": 0.5137614678899083, + "acc_norm_stderr": 0.021429202089874075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.01970687580408563, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.01970687580408563 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + 
"acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.01463518561652784, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.01463518561652784 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406787, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406787 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.032230171959376, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.032230171959376 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.318122555410691, + "acc_stderr": 0.011895407281104104, + "acc_norm": 0.318122555410691, + "acc_norm_stderr": 0.011895407281104104 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015021, + "mc2": 0.4379686054133816, + "mc2_stderr": 0.015396278996687385 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.017188329219654276, + "acc_norm": 0.5419126328217237, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIdenU/Mistral-7b-ko-Y24-DPO_v0.1", + "model_sha": "78813fb52898d37d6c0637b7fb93eb2c5bc23f55", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIdenU/Mistral-7b-ko-Y24_v0.1/result_2023-12-21 04:19:12.json b/AIdenU/Mistral-7b-ko-Y24_v0.1/result_2023-12-21 04:19:12.json new file mode 100644 index 0000000000000000000000000000000000000000..5f45c6f39f933c90f04316c6b6172f95d390a7ed --- /dev/null +++ b/AIdenU/Mistral-7b-ko-Y24_v0.1/result_2023-12-21 04:19:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3506825938566553, + "acc_stderr": 0.01394463593072609, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.014291228393536588 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37870942043417644, + "acc_stderr": 0.004840742206718092, + "acc_norm": 0.4885480979884485, + "acc_norm_stderr": 0.0049884724594180295 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5044699872286079, + "acc_stderr": 0.017879248970584353, + "acc_norm": 0.5044699872286079, + "acc_norm_stderr": 0.017879248970584353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 
0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 
0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.021436998359765317, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.021436998359765317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271768, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 
0.019821843688271768 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210746, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210746 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335317, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3213820078226858, + "acc_stderr": 0.011927581352265076, + "acc_norm": 0.3213820078226858, + "acc_norm_stderr": 0.011927581352265076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237272, + "mc2": 0.4338150951405425, + "mc2_stderr": 0.01536129905959147 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4982290436835891, + "acc_stderr": 0.01719024627623186, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.01710761885954935 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 
1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIdenU/Mistral-7b-ko-Y24_v0.1", + "model_sha": "a0b72b81f985f7fb06695cae82877ca482947dbf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AtAndDev/ShortKingv0.1/result_2023-09-29 19:59:47.json b/AtAndDev/ShortKingv0.1/result_2023-09-29 19:59:47.json new file mode 100644 index 0000000000000000000000000000000000000000..52ed0ae111f0fc859f3bcb9a50e444d1b486e227 --- /dev/null +++ b/AtAndDev/ShortKingv0.1/result_2023-09-29 19:59:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19112627986348124, + "acc_stderr": 0.011490055292778596, + "acc_norm": 0.24829351535836178, + "acc_norm_stderr": 0.012624912868089764 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2756423023302131, + "acc_stderr": 0.0044592414745187915, + "acc_norm": 0.29884485162318264, + "acc_norm_stderr": 0.004568161710399566 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691582, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.03424042924691582 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777555, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777555 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617722, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 
0.03999262876617722 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.02977164271249123, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.02977164271249123 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680588, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680588 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.20257234726688103, + "acc_stderr": 0.022827317491059686, + "acc_norm": 0.20257234726688103, + "acc_norm_stderr": 0.022827317491059686 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.029376616484945644, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.029376616484945644 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924812, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924812 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.035240689515674474, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.035240689515674474 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.027205371538279476, + "acc_norm": 0.226890756302521, + "acc_norm_stderr": 0.027205371538279476 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2153846153846154, + "acc_stderr": 0.020843034557462878, + "acc_norm": 0.2153846153846154, + "acc_norm_stderr": 0.020843034557462878 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.13793103448275862, + "acc_stderr": 0.024261984301044565, + "acc_norm": 0.13793103448275862, + "acc_norm_stderr": 0.024261984301044565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.20967741935483872, + "acc_stderr": 0.023157879349083536, + "acc_norm": 0.20967741935483872, + "acc_norm_stderr": 0.023157879349083536 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2094017094017094, + "acc_stderr": 0.026655699653922737, + "acc_norm": 0.2094017094017094, + "acc_norm_stderr": 0.026655699653922737 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.026616482980501715, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.026616482980501715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 
0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.03036049015401464, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.03036049015401464 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173043, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173043 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.023618678310069363, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.023618678310069363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02378858355165854, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02378858355165854 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.02925282329180363, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.02925282329180363 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322004, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322004 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21100917431192662, + "acc_stderr": 0.017493922404112648, + "acc_norm": 0.21100917431192662, + "acc_norm_stderr": 0.017493922404112648 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.024288619466046116, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.024288619466046116 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.19008264462809918, + "acc_stderr": 
0.03581796951709282, + "acc_norm": 0.19008264462809918, + "acc_norm_stderr": 0.03581796951709282 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.017740899509177795, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.017740899509177795 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460997, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460997 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23016759776536314, + "acc_stderr": 0.014078339253425809, + "acc_norm": 0.23016759776536314, + "acc_norm_stderr": 0.014078339253425809 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.02767846864214471, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.02767846864214471 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3142857142857143, + "acc_stderr": 0.029719329422417468, + "acc_norm": 0.3142857142857143, + "acc_norm_stderr": 0.029719329422417468 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.02904133351059804, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.02904133351059804 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045526, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045526 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.03149328104507955, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.03149328104507955 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589674, + "mc2": 0.49219803033147647, + "mc2_stderr": 0.015947492879186672 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2361275088547816, + "acc_stderr": 0.014601536093324397, + "acc_norm": 0.27508854781582054, + "acc_norm_stderr": 0.015353010757952649 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AtAndDev/ShortKingv0.1", + "model_sha": "6cd9b5bc13ee15b5e7e7cfb46477bc6a7c0b5d47", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/llama-2-ko-7b-it-v1.0.0/result_2023-11-15 11:34:52.json b/BM-K/llama-2-ko-7b-it-v1.0.0/result_2023-11-15 11:34:52.json new file mode 100644 index 0000000000000000000000000000000000000000..e730522c11119bda3e69fddc79ec406f1be5e942 --- /dev/null +++ b/BM-K/llama-2-ko-7b-it-v1.0.0/result_2023-11-15 11:34:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.0139289334613825, + "acc_norm": 0.38310580204778155, + "acc_norm_stderr": 0.01420647266167288 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3973312089225254, + "acc_stderr": 0.00488345518890897, + "acc_norm": 0.518621788488349, + "acc_norm_stderr": 0.004986319587524962 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.036602988340491624, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.036602988340491624 + }, + "harness|ko_mmlu_management|5": { + 
"acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.35759897828863346, + "acc_stderr": 0.017139488998803288, + "acc_norm": 0.35759897828863346, + "acc_norm_stderr": 0.017139488998803288 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.029644006577009618, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.029644006577009618 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.033844291552331346, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.033844291552331346 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3729903536977492, + "acc_stderr": 0.027466610213140105, + "acc_norm": 0.3729903536977492, + "acc_norm_stderr": 0.027466610213140105 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.26905829596412556, + "acc_stderr": 0.029763779406874975, + "acc_norm": 0.26905829596412556, + "acc_norm_stderr": 0.029763779406874975 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.04142313771996664, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.04142313771996664 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419036, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419036 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277723, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277723 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.021362027725222738, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.021362027725222738 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 
0.025560604721022877, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022877 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432118, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432118 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946458, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946458 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3880597014925373, + "acc_stderr": 0.034457899643627506, + "acc_norm": 0.3880597014925373, + "acc_norm_stderr": 0.034457899643627506 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321658, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321658 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776578, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776578 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624576, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624576 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33815028901734107, + "acc_stderr": 0.02546977014940017, + "acc_norm": 0.33815028901734107, + "acc_norm_stderr": 0.02546977014940017 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.03351953879521269, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.03351953879521269 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3549382716049383, + "acc_stderr": 0.026624152478845853, + "acc_norm": 0.3549382716049383, + "acc_norm_stderr": 0.026624152478845853 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3211009174311927, + "acc_stderr": 0.020018149772733747, + "acc_norm": 0.3211009174311927, + "acc_norm_stderr": 0.020018149772733747 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 
0.03455071019102149, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102149 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.026787453111906532, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.026787453111906532 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4628099173553719, + "acc_stderr": 0.045517111961042175, + "acc_norm": 0.4628099173553719, + "acc_norm_stderr": 0.045517111961042175 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.018550634502952964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.018550634502952964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000534, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468638, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468638 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.027257202606114948, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.027257202606114948 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.0287951855742913, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.0287951855742913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301843, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.031219569445301843 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2907431551499348, + "acc_stderr": 0.011598062372851981, + "acc_norm": 0.2907431551499348, + "acc_norm_stderr": 0.011598062372851981 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693268, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693268 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511784, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511784 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752329, + "mc2": 0.3762518297834469, + "mc2_stderr": 0.015197001689915996 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31759149940968123, + "acc_stderr": 
0.016005581876229306, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.0168363772928493 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/llama-2-ko-7b-it-v1.0.0", + "model_sha": "d77fd44b31382f84fa4b8b9afd63a92ded7bde93", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.0.1/result_2023-11-07 22:19:25.json b/BM-K/mistral-7b-it-v1.0.1/result_2023-11-07 22:19:25.json new file mode 100644 index 0000000000000000000000000000000000000000..a701d6d75b0d2935ccf9c496fbfb8913e73bd1f8 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.0.1/result_2023-11-07 22:19:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35665529010238906, + 
"acc_stderr": 0.013998056902620196, + "acc_norm": 0.41467576791808874, + "acc_norm_stderr": 0.014397070564409174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38329018123879705, + "acc_stderr": 0.004851944170671259, + "acc_norm": 0.4987054371639116, + "acc_norm_stderr": 0.004989764686738831 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.017870847506081738, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.017870847506081738 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.031709956060406545, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.031709956060406545 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234353, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234353 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.13725490196078433, + "acc_stderr": 0.03424084669891524, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.03424084669891524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + 
"acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019413, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019413 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748842, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748842 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.03461199429040014, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.03461199429040014 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.0248708152510571, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.0248708152510571 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668784, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46605504587155966, + "acc_stderr": 0.021387863350354, + "acc_norm": 0.46605504587155966, + "acc_norm_stderr": 0.021387863350354 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.01982184368827177, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.01982184368827177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631157, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631157 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298825, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298825 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767864, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767864 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.02850145286039656, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.02850145286039656 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214936, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214936 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950235, + "mc2": 0.453794908688158, + "mc2_stderr": 0.015317536289389658 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3600944510035419, + "acc_stderr": 0.016503686720440072, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.017185069732676514 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.0.1", + "model_sha": "710fbce5dd54e5794f1bcdf4f53d3c0ceeafb405", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline 
at end of file diff --git a/BM-K/mistral-7b-it-v1.0.2/result_2023-11-08 13:07:51.json b/BM-K/mistral-7b-it-v1.0.2/result_2023-11-08 13:07:51.json new file mode 100644 index 0000000000000000000000000000000000000000..dd9170f52fe04ac3c2158d342c9ccb4cb0387369 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.0.2/result_2023-11-08 13:07:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3387372013651877, + "acc_stderr": 0.013830568927974332, + "acc_norm": 0.4035836177474403, + "acc_norm_stderr": 0.01433715891426845 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3811989643497311, + "acc_stderr": 0.00484688692976347, + "acc_norm": 0.4954192391953794, + "acc_norm_stderr": 0.004989572002196691 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.508301404853129, + "acc_stderr": 0.017877498991072, + "acc_norm": 0.508301404853129, + "acc_norm_stderr": 0.017877498991072 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771124, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771124 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685516, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685516 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + 
"acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173078, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683522, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683522 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.4660493827160494, + 
"acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.02143642095552942, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.02143642095552942 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.02852638345214264, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.02852638345214264 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777473, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777473 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412236, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412236 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.02847350127296378, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.02847350127296378 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21899441340782122, + "acc_stderr": 0.013831676687303205, + "acc_norm": 0.21899441340782122, + "acc_norm_stderr": 0.013831676687303205 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824873, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824873 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 
+ }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.012084265626344204, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.012084265626344204 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394812, + "mc2": 0.4533712341088757, + "mc2_stderr": 0.015449105919584536 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3825265643447462, + "acc_stderr": 0.016709165387228806, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.017189767032130824 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.0.2", + "model_sha": "06668a57b990007d15d178c94aabd162d6af9531", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.0.3/result_2023-11-09 00:04:55.json b/BM-K/mistral-7b-it-v1.0.3/result_2023-11-09 00:04:55.json new file mode 100644 index 0000000000000000000000000000000000000000..6d334a3e6b2a76970498f6b9f193e985ac9106ec --- /dev/null +++ b/BM-K/mistral-7b-it-v1.0.3/result_2023-11-09 00:04:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3370307167235495, + "acc_stderr": 0.013813476652902272, + "acc_norm": 0.3993174061433447, + "acc_norm_stderr": 0.014312094557946707 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3791077474606652, + "acc_stderr": 0.00484173445350666, + "acc_norm": 0.4907388966341366, + "acc_norm_stderr": 0.00498892541052277 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.04944901092973779, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.04944901092973779 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.51213282247765, + "acc_stderr": 0.017874698667491334, + "acc_norm": 0.51213282247765, + "acc_norm_stderr": 0.017874698667491334 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562786, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562786 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878152 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094785, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094785 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684973, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684973 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608466, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608466 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548914, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548914 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995093, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995093 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523811, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523811 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412243, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412243 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639875, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639875 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, 
+ "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3213820078226858, + "acc_stderr": 0.01192758135226508, + "acc_norm": 0.3213820078226858, + "acc_norm_stderr": 0.01192758135226508 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380611, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380611 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.038783721137112745, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.038783721137112745 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608753, + "mc2": 0.44405801770483816, + "mc2_stderr": 0.015315267499738446 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32585596221959856, + "acc_stderr": 0.016114023894800333, + "acc_norm": 0.46162927981109797, + "acc_norm_stderr": 0.017139660221845564 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.0.3", + "model_sha": "5d368e894e2091bd003ae65710009016b7bafaa0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.0/result_2023-11-07 08:10:27.json b/BM-K/mistral-7b-it-v1.0/result_2023-11-07 08:10:27.json new file mode 100644 index 0000000000000000000000000000000000000000..1e38491cc801cec5d6f7063a3dc7a867ad00a108 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.0/result_2023-11-07 08:10:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.013975454122756562, + "acc_norm": 0.4087030716723549, + "acc_norm_stderr": 0.014365750345427 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3875721967735511, + "acc_stderr": 0.004862003566798545, + "acc_norm": 0.504282015534754, + "acc_norm_stderr": 0.004989598426249547 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.51213282247765, + "acc_stderr": 0.01787469866749133, + "acc_norm": 0.51213282247765, + "acc_norm_stderr": 0.01787469866749133 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685516, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685516 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111288, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.028396770444111288 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + 
"acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.02834378725054062, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.02834378725054062 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.03032594578928611, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.03032594578928611 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804723, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804723 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332786, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332786 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + 
"acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48256880733944957, + "acc_stderr": 0.02142429187185315, + "acc_norm": 0.48256880733944957, + "acc_norm_stderr": 0.02142429187185315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523811, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523811 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215934, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215934 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291518, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291518 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983576, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 0.012008129938540472, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540472 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608742, + "mc2": 0.4447858809482175, + "mc2_stderr": 0.015211057250300537 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3860684769775679, + "acc_stderr": 0.016738130760321743, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.017186891286894067 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.0", + "model_sha": "f5bfb9dc4f4dd8b64d45c9a158e3982959b18035", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.1.0/result_2023-11-13 23:21:20.json b/BM-K/mistral-7b-it-v1.1.0/result_2023-11-13 23:21:20.json new file mode 100644 index 0000000000000000000000000000000000000000..24fd7efe9d273c171df8874fbc06202cec653931 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.1.0/result_2023-11-13 23:21:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038078, + "acc_norm": 0.4232081911262799, + "acc_norm_stderr": 0.01443803622084802 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37223660625373434, + "acc_stderr": 0.004824130528590597, + "acc_norm": 0.47610037841067515, + "acc_norm_stderr": 0.004984077906216103 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.017870847506081727, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.017870847506081727 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.028043399858210628, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.028043399858210628 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330315, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330315 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767762, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767762 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.031937057262002924, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.031937057262002924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + 
"acc": 0.3699421965317919, + "acc_stderr": 0.0368122963339432, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.0368122963339432 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596433, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.026756255129663765, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.026756255129663765 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288441, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288441 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46422018348623856, + "acc_stderr": 0.0213823647757019, + "acc_norm": 0.46422018348623856, + "acc_norm_stderr": 0.0213823647757019 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.01933314202079706, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.01933314202079706 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469417, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469417 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + 
"acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961464, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.029674288281311183, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311183 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.32653061224489793, + "acc_stderr": 0.030021056238440303, + "acc_norm": 0.32653061224489793, + "acc_norm_stderr": 0.030021056238440303 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5274261603375527, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.5274261603375527, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29791395045632335, + "acc_stderr": 0.01168071734040005, + "acc_norm": 0.29791395045632335, + "acc_norm_stderr": 0.01168071734040005 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.01568092936402462, + "mc2": 0.4553516695896828, + "mc2_stderr": 0.01619950826163877 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3069657615112161, + "acc_stderr": 0.015857588095362814, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.016366945603281273 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.1.0", + "model_sha": "7cf13a6ab9a4f0231b168a8102d784fc6c22510a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.2.0/result_2023-11-14 04:32:36.json b/BM-K/mistral-7b-it-v1.2.0/result_2023-11-14 04:32:36.json new file mode 100644 index 0000000000000000000000000000000000000000..516b11cd189154e88b6dea143165cb4df98d8922 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.2.0/result_2023-11-14 04:32:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33532423208191126, + "acc_stderr": 0.013796182947785564, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.014194389086685251 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3736307508464449, + "acc_stderr": 0.00482778628907485, + "acc_norm": 0.4671380203146783, + "acc_norm_stderr": 0.004978992721242828 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.42911877394636017, + "acc_stderr": 0.017699388483126795, + "acc_norm": 0.42911877394636017, + "acc_norm_stderr": 0.017699388483126795 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 
0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.02809924077580955, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.02809924077580955 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929187, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929187 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.0355580405176393, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.0355580405176393 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177476, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177476 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686856, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686856 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.031804252043841, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.031804252043841 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286102, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286102 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114975, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114975 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488585, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488585 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.027044538138402616, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.027044538138402616 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, 
+ "acc_stderr": 0.01933314202079706, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.01933314202079706 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031225, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031225 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.011787910251664587, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.011787910251664587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627877, + "mc2": 0.4571128110826051, + "mc2_stderr": 0.0163313732350845 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30460448642266824, + "acc_stderr": 0.015823367273129395, + "acc_norm": 0.3293978748524203, + "acc_norm_stderr": 0.016158746868147146 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.2.0", + "model_sha": "d233f62c06362a1008b268f72e919bd6eaf96166", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.3.0/result_2023-11-15 15:01:55.json b/BM-K/mistral-7b-it-v1.3.0/result_2023-11-15 15:01:55.json new file mode 100644 index 0000000000000000000000000000000000000000..2c9ee20c57981fc28f50a832431111814655d8a5 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.3.0/result_2023-11-15 15:01:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145687, + "acc_norm": 0.3873720136518771, + "acc_norm_stderr": 0.014235872487909872 + }, + "harness|ko_hellaswag|10": { + "acc": 0.382194781915953, + "acc_stderr": 0.004849306998727764, + "acc_norm": 0.4986058554072894, + "acc_norm_stderr": 0.004989762014739187 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47126436781609193, + "acc_stderr": 0.01785041079438017, + "acc_norm": 0.47126436781609193, + "acc_norm_stderr": 0.01785041079438017 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.035534363688280626, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.035534363688280626 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.034953345821629324, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.034953345821629324 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836914, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836914 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 
0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473075, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.02690290045866664, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.02690290045866664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442203, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442203 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556054, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556054 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 
+ }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762637, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762637 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30837988826815643, + "acc_stderr": 0.01544571691099888, + "acc_norm": 0.30837988826815643, + "acc_norm_stderr": 0.01544571691099888 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776125, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.510548523206751, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.510548523206751, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897634, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897634 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608742, + "mc2": 0.4538855040890016, + "mc2_stderr": 0.015473472871845475 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5371900826446281, + "acc_stderr": 0.017142736117643304, + "acc_norm": 0.5796930342384888, + "acc_norm_stderr": 0.01697059828117771 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.3.0", + "model_sha": "995ca1c4360613685103c646f290b0062770ec7b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.5.0/result_2023-11-16 23:47:33.json b/BM-K/mistral-7b-it-v1.5.0/result_2023-11-16 23:47:33.json new file mode 100644 index 0000000000000000000000000000000000000000..cd5e57126b5dded8fd760c56a4e69784828a3510 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.5.0/result_2023-11-16 23:47:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.01384746051889298, + "acc_norm": 0.37457337883959047, + "acc_norm_stderr": 0.014144193471893446 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3906592312288389, + "acc_stderr": 0.004869010152280755, + "acc_norm": 0.5010953993228441, + "acc_norm_stderr": 0.004989769436956922 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + 
"acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.01783579880629064, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.01783579880629064 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984548, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984548 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649038, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649038 + }, + "harness|ko_mmlu_high_school_biology|5": { + 
"acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942638, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942638 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796183, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028417, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.021414757058175506, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.021414757058175506 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + 
"acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852387, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852387 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.01959402113657745, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.01959402113657745 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347233, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347233 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010078, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010078 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.03171752824062665, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.03171752824062665 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087307, + "mc2": 0.4312897833619012, + "mc2_stderr": 0.015536038118672747 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4037780401416765, + 
"acc_stderr": 0.01686903154029863, + "acc_norm": 0.42502951593860683, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.5.0", + "model_sha": "59b094a8741371d220147b53e7536af0fcf27d2e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.6.0/result_2023-11-19 13:15:31.json b/BM-K/mistral-7b-it-v1.6.0/result_2023-11-19 13:15:31.json new file mode 100644 index 0000000000000000000000000000000000000000..7070772540166679df37f733b6c267dedbd89e59 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.6.0/result_2023-11-19 13:15:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, 
+ "acc_stderr": 0.01384746051889298, + "acc_norm": 0.3728668941979522, + "acc_norm_stderr": 0.014131176760131163 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3867755427205736, + "acc_stderr": 0.00486016207633099, + "acc_norm": 0.4992033459470225, + "acc_norm_stderr": 0.0049897750778356495 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47509578544061304, + "acc_stderr": 0.017857770704901018, + "acc_norm": 0.47509578544061304, + "acc_norm_stderr": 0.017857770704901018 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085335, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085335 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + 
"acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657553, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657553 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849724, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849724 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173095, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173095 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752056, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379414, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379414 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270699, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270699 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.021410999753635918, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.021410999753635918 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.043758884927270605, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.043758884927270605 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033522, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033522 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094597, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094597 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578728, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578728 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2860335195530726, + "acc_stderr": 0.015113972129062125, + "acc_norm": 0.2860335195530726, + "acc_norm_stderr": 0.015113972129062125 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.01206708307945223, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.01206708307945223 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.034107853389047184, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.034107853389047184 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608742, + "mc2": 0.4436091279270421, + "mc2_stderr": 0.015994798162179236 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5194805194805194, + "acc_stderr": 0.01717730199234254, + "acc_norm": 0.5430932703659976, + "acc_norm_stderr": 0.017126389093086777 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.6.0", + "model_sha": "b149b065fe748591389f5ce440e424a118880e26", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline 
at end of file diff --git a/BM-K/mistral-7b-it-v1.7.0/result_2023-11-20 09:22:14.json b/BM-K/mistral-7b-it-v1.7.0/result_2023-11-20 09:22:14.json new file mode 100644 index 0000000000000000000000000000000000000000..f37243c7fe16a115b6d13aab9e4168d9c1042971 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.7.0/result_2023-11-20 09:22:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.01388881628678211, + "acc_norm": 0.386518771331058, + "acc_norm_stderr": 0.014230084761910474 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3873730332603067, + "acc_stderr": 0.004861544478451848, + "acc_norm": 0.5089623580959968, + "acc_norm_stderr": 0.004988979750014428 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.017869330154003705, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.017869330154003705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956278, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956278 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929778, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.0345905881588323, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.0345905881588323 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748842, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748842 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948482, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.026882643434022885, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.026882643434022885 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 
0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + "acc_stderr": 0.021432956203453316, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.021432956203453316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.02862930519400355, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.02862930519400355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.01980828131744984, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.01980828131744984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.03385177976044811, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.03385177976044811 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220517, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220517 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824862, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824862 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 
0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.012020128195985774, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.012020128195985774 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.4714881280704747, + "mc2_stderr": 0.01554412183162796 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5230224321133412, + "acc_stderr": 0.017172121546727634, + "acc_norm": 0.577331759149941, + "acc_norm_stderr": 0.016983506079577604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.7.0", + "model_sha": "f62174ae285bf46cc453305f1e0b76899a8bcf82", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.7.1/result_2023-11-21 00:39:48.json b/BM-K/mistral-7b-it-v1.7.1/result_2023-11-21 00:39:48.json new file mode 100644 index 0000000000000000000000000000000000000000..15007c601f1e8b8413cca3350d78e53d76fa95ab --- /dev/null +++ b/BM-K/mistral-7b-it-v1.7.1/result_2023-11-21 00:39:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145685, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.014291228393536587 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38717386974706236, + "acc_stderr": 0.00486108453408704, + "acc_norm": 0.5087631945827524, + "acc_norm_stderr": 0.004989014986235632 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.048257293373563895, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.048257293373563895 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4763729246487867, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.4763729246487867, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.032232762667117124, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.032232762667117124 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + 
"acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126174, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126174 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999998, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999998 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699954, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699954 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401147, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401147 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611306, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611306 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095285, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095285 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + 
"acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411127, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411127 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.01197150729498278, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.01197150729498278 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.0158663464013843, + "mc2": 0.46609022121434857, + "mc2_stderr": 0.015577378664296664 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5112160566706021, + "acc_stderr": 0.01718602846948929, + "acc_norm": 0.564344746162928, + "acc_norm_stderr": 0.017047415229476323 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.7.1", + "model_sha": "d9b8182352893418c91400683819f41d7be1e292", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.7.2/result_2023-11-30 06:16:02.json b/BM-K/mistral-7b-it-v1.7.2/result_2023-11-30 06:16:02.json new file mode 100644 index 0000000000000000000000000000000000000000..b1564b599ed7912f00cbf8a3b2326986c647a44e --- /dev/null +++ b/BM-K/mistral-7b-it-v1.7.2/result_2023-11-30 06:16:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.01407722310847014, + "acc_norm": 0.41467576791808874, + "acc_norm_stderr": 0.014397070564409175 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3996215893248357, + "acc_stderr": 0.004888194985997395, + "acc_norm": 0.5103565026887075, + "acc_norm_stderr": 0.004988710917169331 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.038200425866029654, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.038200425866029654 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5134099616858238, + "acc_stderr": 0.017873531736510354, + "acc_norm": 0.5134099616858238, + "acc_norm_stderr": 0.017873531736510354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.02839089739686352, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.02839089739686352 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.033408675019233246, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.033408675019233246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { 
+ "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.03515520728670417, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.03515520728670417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674078, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113115, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113115 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0242785680243077, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0242785680243077 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.03919415545048409, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048409 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829153, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829153 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650137, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650137 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29608938547486036, + "acc_stderr": 0.015268677317602298, + "acc_norm": 
0.29608938547486036, + "acc_norm_stderr": 0.015268677317602298 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3435462842242503, + "acc_stderr": 0.012128961174190156, + "acc_norm": 0.3435462842242503, + "acc_norm_stderr": 0.012128961174190156 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875835, + "mc2": 0.4265196372341518, + "mc2_stderr": 0.015607268833983732 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3707201889020071, + "acc_stderr": 0.0166058012892126, + "acc_norm": 0.3955135773317591, + "acc_norm_stderr": 0.01681081590220604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.7.2", + "model_sha": "dbbe8fc186f5371e9ca608b9650e387b20de7ae3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.7.3/result_2023-12-04 23:53:23.json b/BM-K/mistral-7b-it-v1.7.3/result_2023-12-04 23:53:23.json new file mode 100644 index 0000000000000000000000000000000000000000..33826632a9c63e1d93747609071b74609b112975 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.7.3/result_2023-12-04 23:53:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34215017064846415, + "acc_stderr": 0.013864152159177278, + "acc_norm": 0.3771331058020478, + "acc_norm_stderr": 0.014163366896192598 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37880900219079866, + "acc_stderr": 0.00484099059349469, + "acc_norm": 0.4870543716391157, + "acc_norm_stderr": 0.004988108663179766 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4623243933588761, + "acc_stderr": 0.017829131764287198, + "acc_norm": 0.4623243933588761, + "acc_norm_stderr": 0.017829131764287198 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853441, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853441 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 
0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929186, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929186 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828061, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370333, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3871794871794872, + "acc_stderr": 0.02469721693087895, + "acc_norm": 0.3871794871794872, + "acc_norm_stderr": 0.02469721693087895 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.028040981380761543, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.028040981380761543 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.03023638994217309, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.03023638994217309 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702862, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702862 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228405, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228405 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.035676037996391706, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.035676037996391706 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.02375292871211214, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.02375292871211214 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.026756255129663765, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.026756255129663765 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.02740204204026994, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.02740204204026994 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949098, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949098 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142628, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142628 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.01957695312208883, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.01957695312208883 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + 
"acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22346368715083798, + "acc_stderr": 0.013932068638579771, + "acc_norm": 0.22346368715083798, + "acc_norm_stderr": 0.013932068638579771 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.02714627193662517, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.02714627193662517 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235926, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235926 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3324641460234681, + "acc_stderr": 0.012032022332260514, + "acc_norm": 0.3324641460234681, + "acc_norm_stderr": 0.012032022332260514 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.0154610276272536, + "mc2": 0.42128232388140774, + "mc2_stderr": 0.015666521111746597 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45336481700118064, + "acc_stderr": 0.017115418225226872, + "acc_norm": 0.49940968122786306, + "acc_norm_stderr": 0.017190342123448662 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.7.3", + "model_sha": "485019f55449b1adffd92e017a85bfe922cac2dd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-ko-7b-it-v2.0.0/result_2023-12-23 00:56:47.json b/BM-K/mistral-ko-7b-it-v2.0.0/result_2023-12-23 00:56:47.json new file mode 100644 index 0000000000000000000000000000000000000000..4bde63b32c80f56968c7065f2cb13f71c188c9ee --- /dev/null +++ b/BM-K/mistral-ko-7b-it-v2.0.0/result_2023-12-23 00:56:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179344, + "acc_norm": 0.39078498293515357, + "acc_norm_stderr": 0.014258563880513778 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37970523800039835, + "acc_stderr": 0.0048432163250902655, + "acc_norm": 0.4901414060944035, + "acc_norm_stderr": 0.004988811384747425 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.017852981266633955, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.017852981266633955 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + 
"acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562786, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562786 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 
0.028742040903948485, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129274, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129274 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.02686462436675666, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.02686462436675666 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470867, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470867 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 0.03896878985070417, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.03896878985070417 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + 
"acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611317, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3039106145251397, + "acc_stderr": 0.015382845587584518, + "acc_norm": 0.3039106145251397, + "acc_norm_stderr": 0.015382845587584518 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824866, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824866 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666544, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.011989936640666544 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522512, + "mc2": 0.43306568977437526, + "mc2_stderr": 0.015345933860590263 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 0.017148598015747425 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-ko-7b-it-v2.0.0", + "model_sha": "344b5f989128dc9f7a1bb3a1e8bbfe8b50a4159d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-ko-7b-it-v2.0.1/result_2023-12-26 12:34:11.json b/BM-K/mistral-ko-7b-it-v2.0.1/result_2023-12-26 12:34:11.json new file mode 100644 index 0000000000000000000000000000000000000000..5921661ce1bafc157205e5758972e8eeea671cef --- /dev/null +++ b/BM-K/mistral-ko-7b-it-v2.0.1/result_2023-12-26 12:34:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3395904436860068, + "acc_stderr": 0.013839039762820164, + "acc_norm": 0.378839590443686, + "acc_norm_stderr": 0.014175915490000322 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37890858394742083, + "acc_stderr": 0.004841238763529383, + "acc_norm": 0.4918342959569807, + "acc_norm_stderr": 0.004989115942570063 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47509578544061304, + "acc_stderr": 0.017857770704901018, + 
"acc_norm": 0.47509578544061304, + "acc_norm_stderr": 0.017857770704901018 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255099, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255099 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752045, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413317, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413317 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48623853211009177, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.48623853211009177, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138286, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138286 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, 
+ "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401154, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401154 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29608938547486036, + "acc_stderr": 0.01526867731760228, + "acc_norm": 0.29608938547486036, + "acc_norm_stderr": 0.01526867731760228 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.01201414210184298, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.01201414210184298 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216740976, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216740976 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219376, + "mc2": 0.44088319088488914, + "mc2_stderr": 0.01533044885511757 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5277449822904369, + "acc_stderr": 0.01716386797945602, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-ko-7b-it-v2.0.1", + "model_sha": "5482aa57e129559221c5109620df556b75e70f3a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.0/result_2023-10-06 06:41:38.json b/BM-K/polyglot-ko-1.3b-it-v1.0/result_2023-10-06 06:41:38.json new file mode 100644 index 0000000000000000000000000000000000000000..f44ad05440599fd4ef4e477f7afa330315b0b06d --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.0/result_2023-10-06 06:41:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.22696245733788395, + "acc_stderr": 0.012240491536132861, + "acc_norm": 0.2773037542662116, + "acc_norm_stderr": 0.013082095839059374 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33578968333001397, + "acc_stderr": 0.004713006072807706, + "acc_norm": 0.41585341565425216, + "acc_norm_stderr": 0.0049186120989440285 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.015745497169049057, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.015745497169049057 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.251063829787234, + "acc_stderr": 0.028346963777162452, + "acc_norm": 0.251063829787234, + "acc_norm_stderr": 0.028346963777162452 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818784, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818784 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176892, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176892 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.020932445774463185, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.020932445774463185 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.27586206896551724, + "acc_stderr": 0.031447125816782426, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885193, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.02812096650391441, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.02812096650391441 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241238, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114993, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114993 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.03076944496729602, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729602 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.03029957466478815, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.03029957466478815 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935574, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935574 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.02465968518596728, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.02465968518596728 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860688, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860688 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.15789473684210525, + "acc_stderr": 0.03430265978485698, + "acc_norm": 0.15789473684210525, + "acc_norm_stderr": 0.03430265978485698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.24036697247706423, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.24036697247706423, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604673, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604673 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.02417084087934102, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.02417084087934102 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.23140495867768596, + "acc_stderr": 0.03849856098794088, + "acc_norm": 0.23140495867768596, + "acc_norm_stderr": 0.03849856098794088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.01755581809132226, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.01755581809132226 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349843014, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349843014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.033981108902946366, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.033981108902946366 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.14, + "acc_stderr": 0.03487350880197771, + "acc_norm": 0.14, + "acc_norm_stderr": 0.03487350880197771 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146292, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146292 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.028263889943784586, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.028263889943784586 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.01086543669078027, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.01086543669078027 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02933116229425173, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02933116229425173 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.031922715695483, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.031922715695483 + }, + "harness|ko_truthfulqa_mc|0": { 
+ "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715008, + "mc2": 0.41338491158026774, + "mc2_stderr": 0.01512108388775634 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3116883116883117, + "acc_stderr": 0.015924567607358324, + "acc_norm": 0.3919716646989374, + "acc_norm_stderr": 0.016784332119424077 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.0", + "model_sha": "2f5b0dfed443e3a89c13a13b48d6fe6838c86e67", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.1/result_2023-10-06 07:23:29.json b/BM-K/polyglot-ko-1.3b-it-v1.1/result_2023-10-06 07:23:29.json new file mode 100644 index 
0000000000000000000000000000000000000000..68b6b12533d10aa6245d0830b35d2862378720a7 --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.1/result_2023-10-06 07:23:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2363481228668942, + "acc_stderr": 0.012414960524301818, + "acc_norm": 0.2841296928327645, + "acc_norm_stderr": 0.013179442447653887 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3379804819757021, + "acc_stderr": 0.004720551323547123, + "acc_norm": 0.4192391953794065, + "acc_norm_stderr": 0.004924261467934422 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457923, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457923 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24521072796934865, + "acc_stderr": 0.01538435228454394, + "acc_norm": 0.24521072796934865, + "acc_norm_stderr": 0.01538435228454394 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.037498507091740234, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.037498507091740234 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.028504856470514192, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.028504856470514192 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.026003301117885142, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.026003301117885142 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969174, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969174 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.032087795587867514, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.032087795587867514 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727771, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727771 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.02127839386358628, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.02127839386358628 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.02564938106302926, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.02564938106302926 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.25213675213675213, + "acc_stderr": 0.02844796547623101, + "acc_norm": 0.25213675213675213, + "acc_norm_stderr": 0.02844796547623101 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.02674989977124124, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.02674989977124124 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072773, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072773 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1791907514450867, + "acc_stderr": 0.0292425130590633, + "acc_norm": 0.1791907514450867, + "acc_norm_stderr": 0.0292425130590633 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194974, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194974 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2398843930635838, + "acc_stderr": 0.022989592543123567, + "acc_norm": 0.2398843930635838, + "acc_norm_stderr": 0.022989592543123567 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2993827160493827, + "acc_stderr": 0.02548311560119547, + "acc_norm": 0.2993827160493827, + "acc_norm_stderr": 0.02548311560119547 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 
0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.03646758875075566, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.03646758875075566 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604672, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604672 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.14, + "acc_stderr": 0.03487350880197771, + "acc_norm": 0.14, + "acc_norm_stderr": 0.03487350880197771 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.040261875275912046, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.040261875275912046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.017848089574913222, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.017848089574913222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.03381200005643525, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.03381200005643525 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.029674288281311183, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311183 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.02916273841024977, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.02916273841024977 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24050632911392406, + "acc_stderr": 0.02782078198114968, + "acc_norm": 0.24050632911392406, + "acc_norm_stderr": 0.02782078198114968 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23989569752281617, + 
"acc_stderr": 0.010906282617981653, + "acc_norm": 0.23989569752281617, + "acc_norm_stderr": 0.010906282617981653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591362, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.031922715695482995, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.031922715695482995 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.01505186948671501, + "mc2": 0.4174341547322483, + "mc2_stderr": 0.015183101828823979 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31286894923258557, + "acc_stderr": 0.015941010118302658, + "acc_norm": 0.3872491145218418, + "acc_norm_stderr": 0.016747577991642785 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.1", + "model_sha": "78f227625af9b7013b69de4ef2a203ac71bdda5b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.2/result_2023-10-09 06:14:19.json b/BM-K/polyglot-ko-1.3b-it-v1.2/result_2023-10-09 06:14:19.json new file mode 100644 index 0000000000000000000000000000000000000000..d806392165ae8dee63a4c6bb4d96b8c71e37d9b8 --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.2/result_2023-10-09 06:14:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2551194539249147, + "acc_stderr": 0.012739038695202109, + "acc_norm": 0.30119453924914674, + "acc_norm_stderr": 0.01340674176784762 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3379804819757021, + "acc_stderr": 0.0047205513235471196, + "acc_norm": 0.4176458872734515, + "acc_norm_stderr": 0.00492163264510238 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.29118773946360155, + "acc_stderr": 0.016246087069701393, + "acc_norm": 0.29118773946360155, + "acc_norm_stderr": 0.016246087069701393 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292326, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292326 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2540192926045016, + "acc_stderr": 0.02472386150477169, + "acc_norm": 0.2540192926045016, + "acc_norm_stderr": 0.02472386150477169 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.031493846709941306, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.031493846709941306 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287414, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287414 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932026, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932026 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, 
+ "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121626, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121626 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243838, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243838 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.025822106119415898, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.025822106119415898 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.027778835904935437, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.027778835904935437 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.039559328617958335, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.039559328617958335 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275815, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275815 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.033742355504256936, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.033742355504256936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.208955223880597, + "acc_stderr": 0.028748298931728665, + "acc_norm": 0.208955223880597, + "acc_norm_stderr": 0.028748298931728665 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.031265112061730424, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.031265112061730424 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 
0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.025171041915309684, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.025171041915309684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.0329229663915514, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.0329229663915514 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.018272575810231863, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.018272575810231863 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.025058503316958157, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.025058503316958157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.0306436070716771, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.0306436070716771 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.01788318813466719, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.01788318813466719 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190735, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190735 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079101, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.02896370257079101 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2670391061452514, + "acc_stderr": 0.014796502622562544, + "acc_norm": 0.2670391061452514, + "acc_norm_stderr": 0.014796502622562544 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.026431329870789513, + 
"acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.026431329870789513 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484378, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23598435462842243, + "acc_stderr": 0.010844802669662682, + "acc_norm": 0.23598435462842243, + "acc_norm_stderr": 0.010844802669662682 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.031145570659486782, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.031145570659486782 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2252141982864137, + "mc1_stderr": 0.01462324076802348, + "mc2": 0.4080616788903193, + "mc2_stderr": 0.015242253889585933 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31759149940968123, + "acc_stderr": 0.016005581876229306, + "acc_norm": 0.40731995277449823, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.2", + "model_sha": "d1a6abed1624c40b91b5df3acb5e245e281adc18", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.3/result_2023-10-09 06:23:09.json b/BM-K/polyglot-ko-1.3b-it-v1.3/result_2023-10-09 06:23:09.json new file mode 100644 index 0000000000000000000000000000000000000000..6841584f85702163e924102d33e9c561d27da7bf --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.3/result_2023-10-09 06:23:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2619453924914676, + "acc_stderr": 0.012849054826858117, + "acc_norm": 0.30802047781569963, + "acc_norm_stderr": 0.01349142951729204 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33957379008165706, + "acc_stderr": 0.0047259676848064045, + "acc_norm": 0.4195379406492731, + "acc_norm_stderr": 0.004924748500639348 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2784163473818646, + "acc_stderr": 0.01602829518899247, + "acc_norm": 0.2784163473818646, + "acc_norm_stderr": 0.01602829518899247 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292326, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292326 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.036293353299478595, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.036293353299478595 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.29596412556053814, + "acc_stderr": 0.030636591348699796, + "acc_norm": 0.29596412556053814, + "acc_norm_stderr": 0.030636591348699796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + 
"acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270287, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270287 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.1724137931034483, + "acc_stderr": 0.031478307902595745, + "acc_norm": 0.1724137931034483, + "acc_norm_stderr": 0.031478307902595745 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.030176808288974337, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.030176808288974337 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.021278393863586282, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.021278393863586282 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144446, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.02564938106302925, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.02564938106302925 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24786324786324787, + "acc_stderr": 0.028286324075564393, + "acc_norm": 0.24786324786324787, + "acc_norm_stderr": 0.028286324075564393 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507383, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507383 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.034791855725996586, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.034791855725996586 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.031157150869355568, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.031157150869355568 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + 
"acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.02468531686725781, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.02468531686725781 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.0244772228561351, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.0244772228561351 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.03097543638684542, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.03097543638684542 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21100917431192662, + "acc_stderr": 0.01749392240411265, + "acc_norm": 0.21100917431192662, + "acc_norm_stderr": 0.01749392240411265 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.030643607071677105, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.030643607071677105 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.017917974069594722, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.017917974069594722 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.02813968944485966, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.02813968944485966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497717, + "acc_norm": 0.25921787709497207, + 
"acc_norm_stderr": 0.014655780837497717 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.027472274473233818, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.027472274473233818 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3020408163265306, + "acc_stderr": 0.029393609319879818, + "acc_norm": 0.3020408163265306, + "acc_norm_stderr": 0.029393609319879818 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460288, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460288 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.010865436690780272, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.010865436690780272 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.03058759135160425, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.03058759135160425 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22888616891064872, + "mc1_stderr": 0.014706994909055027, + "mc2": 0.4031826036090223, + "mc2_stderr": 0.0151985432197755 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2975206611570248, + "acc_stderr": 0.01571774220508992, + "acc_norm": 0.37662337662337664, + "acc_norm_stderr": 0.016658799874051975 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.3", + "model_sha": "1df1840d994fed4d5806ca38746639407c9bb970", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.4/result_2023-10-09 06:31:19.json b/BM-K/polyglot-ko-1.3b-it-v1.4/result_2023-10-09 06:31:19.json new file mode 100644 index 0000000000000000000000000000000000000000..9006fa923ed269b630abfc3e6c94da3aa9d2a483 --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.4/result_2023-10-09 06:31:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25170648464163825, + "acc_stderr": 0.012682496334042963, + "acc_norm": 0.30887372013651876, + "acc_norm_stderr": 0.013501770929344003 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34096793467436765, + "acc_stderr": 0.004730658073041557, + "acc_norm": 0.4206333399721171, + "acc_norm_stderr": 0.004926518439372268 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.04453254836326467, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.04453254836326467 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.01598281477469563, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.01598281477469563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073462, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.03633384414073462 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826373, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826373 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23829787234042554, + "acc_stderr": 0.027851252973889802, + "acc_norm": 0.23829787234042554, + "acc_norm_stderr": 0.027851252973889802 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824665, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824665 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2508038585209003, + "acc_stderr": 0.024619771956697165, + "acc_norm": 0.2508038585209003, + "acc_norm_stderr": 
0.024619771956697165 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.02715715047956382, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.02715715047956382 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808779, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808779 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36134453781512604, + "acc_stderr": 0.031204691225150006, + "acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.031204691225150006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28974358974358977, + "acc_stderr": 0.023000628243687968, + "acc_norm": 0.28974358974358977, + "acc_norm_stderr": 0.023000628243687968 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358608, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358608 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022884, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022884 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2094017094017094, + "acc_stderr": 0.026655699653922737, + "acc_norm": 0.2094017094017094, + "acc_norm_stderr": 0.026655699653922737 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946459, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946459 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.18407960199004975, + "acc_stderr": 0.02740385941078684, + "acc_norm": 0.18407960199004975, + "acc_norm_stderr": 
0.02740385941078684 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240018, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080342, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080342 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036843, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036843 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.023703099525258172, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.023703099525258172 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724148, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724148 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.024288533637726095, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.024288533637726095 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.03161877917935411, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.03161877917935411 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24770642201834864, + "acc_stderr": 0.018508143602547805, + "acc_norm": 0.24770642201834864, + "acc_norm_stderr": 0.018508143602547805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573982, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573982 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325004, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325004 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.017322789207784326, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.017322789207784326 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791047, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791047 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761987, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761987 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19117647058823528, + "acc_stderr": 0.023886881922440362, + "acc_norm": 0.19117647058823528, + "acc_norm_stderr": 0.023886881922440362 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.028795185574291282, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.028795185574291282 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24472573839662448, + "acc_stderr": 0.02798569938703642, + "acc_norm": 0.24472573839662448, + "acc_norm_stderr": 0.02798569938703642 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23272490221642764, + "acc_stderr": 0.0107925955538885, + "acc_norm": 0.23272490221642764, + "acc_norm_stderr": 0.0107925955538885 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.029983733055913623, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.029983733055913623 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871096, + "mc2": 0.414131633910044, + "mc2_stderr": 0.015365810716919849 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3105076741440378, + "acc_stderr": 0.015908004528762003, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.016637917789798742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.4", + "model_sha": "acbd40970c01a4b40debc0d9a9ac096a74673d74", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.6/result_2023-11-06 01:10:50.json b/BM-K/polyglot-ko-1.3b-it-v1.6/result_2023-11-06 01:10:50.json new file mode 100644 index 0000000000000000000000000000000000000000..7700635f470576d89fb808f70f17dfc0c101cd14 --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.6/result_2023-11-06 01:10:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25597269624573377, + "acc_stderr": 0.012753013241244508, + "acc_norm": 0.295221843003413, + "acc_norm_stderr": 0.013329750293382316 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3419637522405895, + "acc_stderr": 0.0047339804707992195, + "acc_norm": 0.4192391953794065, + "acc_norm_stderr": 0.004924261467934419 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.042450224863844935, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.042450224863844935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27330779054916987, + "acc_stderr": 0.01593668106262856, + "acc_norm": 0.27330779054916987, + "acc_norm_stderr": 0.01593668106262856 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.03547854198560826, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.03547854198560826 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 
0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.027678452578212383, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.027678452578212383 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2914798206278027, + "acc_stderr": 0.030500283176545913, + "acc_norm": 0.2914798206278027, + "acc_norm_stderr": 0.030500283176545913 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924812, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924812 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02242127361292372, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02242127361292372 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.0401910747255735, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.0401910747255735 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335127, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335127 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.25213675213675213, + "acc_stderr": 0.02844796547623101, + "acc_norm": 0.25213675213675213, + "acc_norm_stderr": 0.02844796547623101 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241238, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.0430911870994646, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.0430911870994646 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871937, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871937 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.03014777593540922, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.03014777593540922 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.03063114553919882, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.03063114553919882 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.02286083830923207, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.02286083830923207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.024748624490537368, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.024748624490537368 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.031821550509166484, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.031821550509166484 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.018175110510343595, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.018175110510343595 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1487603305785124, + "acc_stderr": 0.03248470083807195, + "acc_norm": 0.1487603305785124, + "acc_norm_stderr": 0.03248470083807195 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 
0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2630718954248366, + "acc_stderr": 0.017812676542320657, + "acc_norm": 0.2630718954248366, + "acc_norm_stderr": 0.017812676542320657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510937, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.01446589382985992, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.01446589382985992 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.027365861131513812, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.027365861131513812 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.0282638899437846, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.0282638899437846 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23468057366362452, + "acc_stderr": 0.010824026872449346, + "acc_norm": 0.23468057366362452, + "acc_norm_stderr": 0.010824026872449346 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02933116229425172, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02933116229425172 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23255813953488372, + "mc1_stderr": 0.014789157531080517, + "mc2": 0.40663525842480935, + "mc2_stderr": 0.01551567406322468 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3293978748524203, + "acc_stderr": 0.016158746868147143, + "acc_norm": 0.40731995277449823, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.6", + "model_sha": "97def0549ef147c96d755ba79a29c3efcdb3f737", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.7/result_2023-11-06 08:02:10.json b/BM-K/polyglot-ko-1.3b-it-v1.7/result_2023-11-06 08:02:10.json new file mode 100644 index 0000000000000000000000000000000000000000..44105c3f4c680e0901c3a5b4eced875fa7bfb0a1 --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.7/result_2023-11-06 08:02:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.23720136518771331, + "acc_stderr": 0.012430399829260861, + "acc_norm": 0.2960750853242321, + "acc_norm_stderr": 0.01334091608524627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3437562238597889, + "acc_stderr": 0.004739902411944544, + "acc_norm": 0.4171479784903406, + "acc_norm_stderr": 0.004920800313232743 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.031581495393387324, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.031581495393387324 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.04620284082280039, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.04620284082280039 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25287356321839083, + "acc_stderr": 0.015543377313719681, + "acc_norm": 0.25287356321839083, + "acc_norm_stderr": 0.015543377313719681 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.0261488180184245, + "acc_norm": 0.2, + "acc_norm_stderr": 0.0261488180184245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1686746987951807, + "acc_stderr": 0.029152009627856544, + "acc_norm": 0.1686746987951807, + "acc_norm_stderr": 0.029152009627856544 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2379421221864952, + "acc_stderr": 0.024185150647818707, + "acc_norm": 0.2379421221864952, + "acc_norm_stderr": 0.024185150647818707 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.15695067264573992, + "acc_stderr": 0.0244135871749074, + "acc_norm": 0.15695067264573992, + "acc_norm_stderr": 0.0244135871749074 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.033832012232444426, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.033832012232444426 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.02702543349888237, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.02702543349888237 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3384615384615385, + "acc_stderr": 0.023991500500313033, + "acc_norm": 0.3384615384615385, + "acc_norm_stderr": 0.023991500500313033 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.03031509928561773, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.03031509928561773 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3225806451612903, + "acc_stderr": 0.026593084516572277, + "acc_norm": 0.3225806451612903, + "acc_norm_stderr": 0.026593084516572277 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24786324786324787, + "acc_stderr": 
0.02828632407556441, + "acc_norm": 0.24786324786324787, + "acc_norm_stderr": 0.02828632407556441 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844082, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844082 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 0.029475250236017183, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.029475250236017183 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21676300578034682, + "acc_stderr": 0.022183477668412853, + "acc_norm": 0.21676300578034682, + "acc_norm_stderr": 0.022183477668412853 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.03487825168497892, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.03487825168497892 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.02465968518596728, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.02465968518596728 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3577981651376147, + "acc_stderr": 0.02055206078482781, + "acc_norm": 0.3577981651376147, + "acc_norm_stderr": 0.02055206078482781 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 
0.025829163272757485, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.025829163272757485 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.017776947157528027, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.017776947157528027 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.025892151156709405, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.025892151156709405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.028263889943784593, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.028263889943784593 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2561929595827901, + "acc_stderr": 0.011149173153110583, + "acc_norm": 0.2561929595827901, + "acc_norm_stderr": 0.011149173153110583 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501947, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501947 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23011015911872704, + "mc1_stderr": 0.014734557959807762, + "mc2": 0.4165738353723906, + "mc2_stderr": 0.015472231665083085 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3624557260920897, + "acc_stderr": 0.016527131240453713, + "acc_norm": 0.44510035419126326, + "acc_norm_stderr": 0.017086417431005474 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.7", + "model_sha": "6f92e92497cdde6029d4cb4ea13380933e9c1b5f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/yi-ko-6b-it-v1.0.0/result_2023-12-05 09:11:37.json b/BM-K/yi-ko-6b-it-v1.0.0/result_2023-12-05 09:11:37.json new file mode 100644 index 0000000000000000000000000000000000000000..850d698b6237fe6f907136d34c701997792ea682 --- /dev/null +++ b/BM-K/yi-ko-6b-it-v1.0.0/result_2023-12-05 09:11:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470137, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137991 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41894045010953995, + 
"acc_stderr": 0.0049237725818484955, + "acc_norm": 0.5630352519418442, + "acc_norm_stderr": 0.0049499693630176535 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5542784163473818, + "acc_stderr": 0.017774297282479506, + "acc_norm": 0.5542784163473818, + "acc_norm_stderr": 0.017774297282479506 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016337, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016337 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349472, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723456, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723456 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228405, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228405 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.02455229220934265, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.02455229220934265 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594384, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594384 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6422018348623854, + 
"acc_stderr": 0.020552060784827818, + "acc_norm": 0.6422018348623854, + "acc_norm_stderr": 0.020552060784827818 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5620915032679739, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.5620915032679739, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4395424836601307, + "acc_stderr": 0.020079420408087918, + "acc_norm": 0.4395424836601307, + "acc_norm_stderr": 0.020079420408087918 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.032259413526312945, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.032259413526312945 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303656, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303656 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.46691176470588236, + "acc_stderr": 0.030306257722468317, + "acc_norm": 0.46691176470588236, + "acc_norm_stderr": 0.030306257722468317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32333767926988266, + "acc_stderr": 0.011946565758447204, + "acc_norm": 0.32333767926988266, + "acc_norm_stderr": 0.011946565758447204 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630573, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165636, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.26438188494492043, + "mc1_stderr": 0.015438211119522512, + "mc2": 0.40241254956351097, + "mc2_stderr": 0.015340553744152264 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5112160566706021, + "acc_stderr": 0.01718602846948929, + "acc_norm": 0.5419126328217237, + "acc_norm_stderr": 0.017129852117911144 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/yi-ko-6b-it-v1.0.0", + "model_sha": "1401792e5c974a79e0f6ccb7f060003d0d54e2e5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/yi-ko-6b-it-v1.0.3/result_2023-12-14 04:08:57.json b/BM-K/yi-ko-6b-it-v1.0.3/result_2023-12-14 04:08:57.json new file mode 100644 index 
0000000000000000000000000000000000000000..28989edc70eba3c34454a421c031dda73801a7e9 --- /dev/null +++ b/BM-K/yi-ko-6b-it-v1.0.3/result_2023-12-14 04:08:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28242320819112626, + "acc_stderr": 0.013155456884097218, + "acc_norm": 0.34044368600682595, + "acc_norm_stderr": 0.01384746051889298 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3498307110137423, + "acc_stderr": 0.004759416464201141, + "acc_norm": 0.43397729535949015, + "acc_norm_stderr": 0.004946089230153021 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.035650796707083106, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.035650796707083106 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24393358876117496, + "acc_stderr": 0.015357212665829489, + "acc_norm": 0.24393358876117496, + "acc_norm_stderr": 0.015357212665829489 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292323, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292323 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071854, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071854 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.20257234726688103, + "acc_stderr": 0.022827317491059686, + "acc_norm": 0.20257234726688103, + "acc_norm_stderr": 0.022827317491059686 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1919191919191919, + "acc_stderr": 0.028057791672989017, + "acc_norm": 0.1919191919191919, + "acc_norm_stderr": 0.028057791672989017 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.03664666337225256, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.03664666337225256 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33076923076923076, + "acc_stderr": 0.023854795680971142, + "acc_norm": 0.33076923076923076, + "acc_norm_stderr": 0.023854795680971142 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.039578354719809805, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.039578354719809805 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.17733990147783252, + "acc_stderr": 0.026874337276808342, + "acc_norm": 0.17733990147783252, + "acc_norm_stderr": 0.026874337276808342 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553873, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553873 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.028120966503914404, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.028120966503914404 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31840796019900497, + "acc_stderr": 0.032941184790540964, + "acc_norm": 0.31840796019900497, + "acc_norm_stderr": 0.032941184790540964 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.03414014007044036, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.03414014007044036 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.02094048156533483, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.02094048156533483 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080343, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080343 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587403, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.02357688174400572, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400572 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + 
"acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.034998072761933376, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.034998072761933376 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.017923087667803053, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.017923087667803053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.026493033225145894, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.026493033225145894 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2238562091503268, + "acc_stderr": 0.016863008585416617, + "acc_norm": 0.2238562091503268, + "acc_norm_stderr": 0.016863008585416617 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.02484792135806396, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.02484792135806396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.028795185574291268, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.028795185574291268 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.029936696387138605, + "acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.029936696387138605 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25358539765319427, + "acc_stderr": 
0.011111715336101129, + "acc_norm": 0.25358539765319427, + "acc_norm_stderr": 0.011111715336101129 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083291, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083291 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.036462049632538115, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.036462049632538115 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.01578537085839671, + "mc2": 0.44826493419395586, + "mc2_stderr": 0.015501611237277205 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2809917355371901, + "acc_stderr": 0.015453559655458275, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.016689333596980133 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + 
"config_general": { + "model_name": "BM-K/yi-ko-6b-it-v1.0.3", + "model_sha": "b52413e092bbe1a6319d94569f07891c2d0c95f0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Cartinoe5930/KoRAE-13b-DPO/result_2023-11-28 08:26:01.json b/Cartinoe5930/KoRAE-13b-DPO/result_2023-11-28 08:26:01.json new file mode 100644 index 0000000000000000000000000000000000000000..c5b865a63bf20f4c69ea2c2e10929cb36ccd84d9 --- /dev/null +++ b/Cartinoe5930/KoRAE-13b-DPO/result_2023-11-28 08:26:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39078498293515357, + "acc_stderr": 0.014258563880513785, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.01457558392201967 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42362079267078273, + "acc_stderr": 0.004931219148182245, + "acc_norm": 0.5753833897629954, + "acc_norm_stderr": 0.004932745013072709 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5325670498084292, + "acc_stderr": 0.017841995750520874, + "acc_norm": 0.5325670498084292, + "acc_norm_stderr": 0.017841995750520874 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 
0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.02508830145469484, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.02508830145469484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.03125610824421881, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.03125610824421881 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.0478200179138006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.02345603738398203, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.02345603738398203 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 
0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.01959402113657745, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.01959402113657745 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048228, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048228 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253595, + "mc2": 0.41276206628297735, + "mc2_stderr": 0.014870061139692456 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48288075560802834, + "acc_stderr": 0.017180275246085622, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.017090852631668336 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Cartinoe5930/KoRAE-13b-DPO", + "model_sha": "70de20a53e3dc47eeb7b8ddc0864d81caff6038c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Cartinoe5930/KoRAE-13b/result_2023-11-26 13:45:22.json b/Cartinoe5930/KoRAE-13b/result_2023-11-26 13:45:22.json new file mode 100644 index 0000000000000000000000000000000000000000..6d2c67f1ec90b1d5adac5c8f12eb493b206b7883 --- /dev/null +++ b/Cartinoe5930/KoRAE-13b/result_2023-11-26 13:45:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3924914675767918, + "acc_stderr": 0.01426963463567073, + "acc_norm": 0.46331058020477817, + "acc_norm_stderr": 0.014572000527756994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42202748456482775, + "acc_stderr": 0.004928735103635848, + "acc_norm": 0.572495518820952, + "acc_norm_stderr": 0.004937054233711569 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5338441890166028, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.5338441890166028, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 
0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.0478200179138006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823018, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.02345603738398203, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.02345603738398203 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + 
"acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881695, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.01962744474841224, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.01962744474841224 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + 
"acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.01185591158704823, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.01185591158704823 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522505, + "mc2": 0.4107667883351212, + "mc2_stderr": 0.014847145006763885 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.017077254131556228 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Cartinoe5930/KoRAE-13b", + "model_sha": "ea6b5bc5c26f06cbb2a0cb973b691f4080bbee72", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Cartinoe5930/original-KoRAE-13b-3ep/result_2023-11-29 23:33:12.json b/Cartinoe5930/original-KoRAE-13b-3ep/result_2023-11-29 23:33:12.json new file mode 100644 index 0000000000000000000000000000000000000000..b1023a16186c115f32a567f88db5d6c76ce14317 --- /dev/null +++ b/Cartinoe5930/original-KoRAE-13b-3ep/result_2023-11-29 23:33:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3856655290102389, + "acc_stderr": 0.014224250973257182, + "acc_norm": 0.44368600682593856, + "acc_norm_stderr": 0.014518421825670447 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4207329217287393, + "acc_stderr": 0.004926678108601343, + "acc_norm": 0.5697072296355308, + "acc_norm_stderr": 0.004941051795214797 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.541507024265645, + "acc_stderr": 0.017818248603465578, + "acc_norm": 0.541507024265645, + "acc_norm_stderr": 0.017818248603465578 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977978, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977978 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 
0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.040824829046386284, + "acc_norm": 0.4, + "acc_norm_stderr": 0.040824829046386284 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.0305032920133426, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.0305032920133426 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 
0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.040894654493255835, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.040894654493255835 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.021311335009708575, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.021311335009708575 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171563, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171563 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.044492703500683836, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.044492703500683836 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.01969145905235415, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.01969145905235415 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150381, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150381 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.012043812655846147, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.012043812655846147 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237024, + "mc2": 0.417499174328329, + "mc2_stderr": 0.014766097200285613 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.01717567127983645, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Cartinoe5930/original-KoRAE-13b-3ep", + "model_sha": "6c109c149338c1aff8de13e82058abedb03b754d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Cartinoe5930/original-KoRAE-13b/result_2023-11-28 09:14:48.json b/Cartinoe5930/original-KoRAE-13b/result_2023-11-28 09:14:48.json new file mode 100644 index 0000000000000000000000000000000000000000..72a97ed91ced9b44efb0628457bce6c2dbc03655 --- /dev/null +++ b/Cartinoe5930/original-KoRAE-13b/result_2023-11-28 09:14:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.01423008476191047, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 0.014553749939306864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4221270663214499, + "acc_stderr": 0.00492889189587429, + "acc_norm": 0.5704043019318861, + "acc_norm_stderr": 0.004940067402031046 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5389527458492975, + "acc_stderr": 0.017825621793239006, + "acc_norm": 0.5389527458492975, + "acc_norm_stderr": 0.017825621793239006 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.03148955829745529, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.03148955829745529 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4128205128205128, + "acc_stderr": 0.02496268356433182, + "acc_norm": 0.4128205128205128, + "acc_norm_stderr": 0.02496268356433182 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.03125610824421881, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.03125610824421881 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.0305032920133426, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.0305032920133426 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 
0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.042663394431593955, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.042663394431593955 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353992, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353992 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604675, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604675 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.01954210156485412, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.01954210156485412 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411952, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411952 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3122555410691004, + "acc_stderr": 0.011835798135683175, + "acc_norm": 0.3122555410691004, + "acc_norm_stderr": 0.011835798135683175 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.01534540948555797, + "mc2": 0.4067288610044621, + "mc2_stderr": 0.014720415548716639 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.017185069732676524, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972202 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Cartinoe5930/original-KoRAE-13b", + "model_sha": "5db145b0f9576d388f073cd01036cd9c72f01860", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Cartinoe5930/weak-KoRAE-13b/result_2023-11-28 02:02:22.json b/Cartinoe5930/weak-KoRAE-13b/result_2023-11-28 02:02:22.json new file mode 100644 index 0000000000000000000000000000000000000000..d957eb8886a06f834cdf7b51054b61a301f9d94e --- /dev/null +++ b/Cartinoe5930/weak-KoRAE-13b/result_2023-11-28 02:02:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000324, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633835 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41734714200358497, + "acc_stderr": 0.0049211338649318885, + "acc_norm": 0.5679147580163314, + "acc_norm_stderr": 0.0049435372423444176 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.017869330154003705, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.017869330154003705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.032321469162244695, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.032321469162244695 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539746, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539746 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 
0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230175, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230175 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5174311926605505, + "acc_stderr": 0.02142429187185315, + "acc_norm": 0.5174311926605505, + "acc_norm_stderr": 0.02142429187185315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 
0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.01946951822157369, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.01946951822157369 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.011759939618085455, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085455 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.01541524174023703, + "mc2": 0.4040029626548701, + "mc2_stderr": 0.014782276857043152 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Cartinoe5930/weak-KoRAE-13b", + "model_sha": "f6d72bd200da4870967487484595ac16355c52fd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Chang-Su/llama-2-13b-chat-ko/result_2023-10-18 16:07:29.json b/Chang-Su/llama-2-13b-chat-ko/result_2023-10-18 16:07:29.json new file mode 100644 index 0000000000000000000000000000000000000000..6419bb96be2d940e45a2e88853f2d86eafe98065 --- /dev/null +++ b/Chang-Su/llama-2-13b-chat-ko/result_2023-10-18 16:07:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3037542662116041, + "acc_stderr": 0.013438909184778759, + "acc_norm": 0.3464163822525597, + "acc_norm_stderr": 0.013905011180063251 + }, + "harness|ko_hellaswag|10": { + "acc": 0.350726946823342, + "acc_stderr": 0.0047622234924352535, + "acc_norm": 0.45429197371041624, + "acc_norm_stderr": 0.004968888130290068 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + 
"acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4610472541507024, + "acc_stderr": 0.01782562179323902, + "acc_norm": 0.4610472541507024, + "acc_norm_stderr": 0.01782562179323902 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353228, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353228 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.02801365189199507, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.02801365189199507 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928276, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41414141414141414, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.41414141414141414, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767762, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767762 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.027709359675032488, + "acc_norm": 
0.3870967741935484, + "acc_norm_stderr": 0.027709359675032488 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.03265903381186194, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.03265903381186194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3584905660377358, + "acc_stderr": 0.029514703583981765, + "acc_norm": 0.3584905660377358, + "acc_norm_stderr": 0.029514703583981765 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881564, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.02813325257881564 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4925373134328358, + "acc_stderr": 0.035351400842767194, + "acc_norm": 0.4925373134328358, + "acc_norm_stderr": 0.035351400842767194 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.023330654054535903, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.023330654054535903 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.40173410404624277, + "acc_stderr": 0.026394104177643634, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027125115513166865, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027125115513166865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537318, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537318 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3798165137614679, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.3798165137614679, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 
0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.02768418188330289, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.02768418188330289 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.018718067052623227, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.018718067052623227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953202, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953202 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.02679956202488769, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.02679956202488769 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4092827004219409, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.4092827004219409, + "acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823062999, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823062999 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373616, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373616 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237035, + "mc2": 0.42145051773986575, + "mc2_stderr": 0.015233960921162444 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31759149940968123, + "acc_stderr": 0.0160055818762293, + "acc_norm": 
0.40613931523022434, + "acc_norm_stderr": 0.016884749503191392 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Chang-Su/llama-2-13b-chat-ko", + "model_sha": "3a82a33f61584cbe72dc32c15d55bfd182cefd8b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DILAB-HYU/KoQuality-Polyglot-5.8b/result_2023-10-12 13:21:04.json b/DILAB-HYU/KoQuality-Polyglot-5.8b/result_2023-10-12 13:21:04.json new file mode 100644 index 0000000000000000000000000000000000000000..86130d043c4fa0acf617a0cf455d394b8310c9d8 --- /dev/null +++ b/DILAB-HYU/KoQuality-Polyglot-5.8b/result_2023-10-12 13:21:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2977815699658703, + "acc_stderr": 
0.01336308010724449, + "acc_norm": 0.3370307167235495, + "acc_norm_stderr": 0.013813476652902272 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38458474407488547, + "acc_stderr": 0.004855027248398158, + "acc_norm": 0.4970125473013344, + "acc_norm_stderr": 0.004989692344313998 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.1695906432748538, + "acc_stderr": 0.028782108105401712, + "acc_norm": 0.1695906432748538, + "acc_norm_stderr": 0.028782108105401712 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20561941251596424, + "acc_stderr": 0.014452500456785823, + "acc_norm": 0.20561941251596424, + "acc_norm_stderr": 0.014452500456785823 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.17872340425531916, + "acc_stderr": 0.025045373272050957, + "acc_norm": 0.17872340425531916, + "acc_norm_stderr": 0.025045373272050957 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.031417842916639245, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.031417842916639245 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.1031390134529148, + "acc_stderr": 0.020412564289839272, + "acc_norm": 0.1031390134529148, + "acc_norm_stderr": 0.020412564289839272 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732523, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732523 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.024359581465396983, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.024359581465396983 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 
0.040201512610368445 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678241, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678241 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.02652270967466777, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.02652270967466777 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.17094017094017094, + "acc_stderr": 0.024662496845209828, + "acc_norm": 0.17094017094017094, + "acc_norm_stderr": 0.024662496845209828 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443866, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443866 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948365, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948365 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.022497230190967547, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.022497230190967547 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.033519538795212696, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.033519538795212696 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 
0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3541284403669725, + "acc_stderr": 0.0205047290138291, + "acc_norm": 0.3541284403669725, + "acc_norm_stderr": 0.0205047290138291 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.026173908506718576, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.026173908506718576 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.12396694214876033, + "acc_stderr": 0.03008309871603522, + "acc_norm": 0.12396694214876033, + "acc_norm_stderr": 0.03008309871603522 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926606, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926606 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21241830065359477, + "acc_stderr": 0.016547148636203147, + "acc_norm": 0.21241830065359477, + "acc_norm_stderr": 0.016547148636203147 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266733, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266733 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.03562367850095391, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.03562367850095391 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556163, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.19831223628691982, + "acc_stderr": 0.025955020841621112, + "acc_norm": 0.19831223628691982, + "acc_norm_stderr": 0.025955020841621112 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24315514993481094, + "acc_stderr": 0.010956556654417356, + "acc_norm": 0.24315514993481094, + "acc_norm_stderr": 0.010956556654417356 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693257, + "acc_norm": 0.25980392156862747, + 
"acc_norm_stderr": 0.030778554678693257 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570342, + "mc2": 0.4081734277840062, + "mc2_stderr": 0.014989124693241153 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36835891381345925, + "acc_stderr": 0.01658385898263907, + "acc_norm": 0.46871310507674147, + "acc_norm_stderr": 0.017156666859785445 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DILAB-HYU/KoQuality-Polyglot-5.8b", + "model_sha": "270b6dd7bb08032bb13164b7438b2bac83709ae4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DILAB-HYU/koquality-ko-ref-llama2-7b/result_2023-11-05 11:52:21.json b/DILAB-HYU/koquality-ko-ref-llama2-7b/result_2023-11-05 11:52:21.json new file mode 100644 index 0000000000000000000000000000000000000000..07206e453cbd2b46e708242213929b8311ab4360 --- /dev/null +++ b/DILAB-HYU/koquality-ko-ref-llama2-7b/result_2023-11-05 11:52:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.01369743246669324, + "acc_norm": 0.3779863481228669, + "acc_norm_stderr": 0.0141696645203031 + }, + "harness|ko_hellaswag|10": { + "acc": 0.377414857598088, + "acc_stderr": 0.0048374934398742984, + "acc_norm": 0.48755228042222665, + "acc_norm_stderr": 0.004988234881206747 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03615507630310935, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03615507630310935 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3116219667943806, + "acc_stderr": 0.016562433867284176, + "acc_norm": 0.3116219667943806, + "acc_norm_stderr": 0.016562433867284176 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.030363582197238167, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.030363582197238167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.23493975903614459, + "acc_stderr": 0.03300533186128922, + "acc_norm": 0.23493975903614459, + "acc_norm_stderr": 0.03300533186128922 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632945, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632945 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.03021683101150876, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.03021683101150876 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.32323232323232326, + "acc_stderr": 0.033322999210706444, + "acc_norm": 0.32323232323232326, + "acc_norm_stderr": 0.033322999210706444 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.029597329730978096, + "acc_norm": 
0.29411764705882354, + "acc_norm_stderr": 0.029597329730978096 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.025988500792411894, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.025988500792411894 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708087, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708087 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.040693063197213775, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.040693063197213775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02578787422095932, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02578787422095932 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.29850746268656714, + "acc_stderr": 0.032357437893550424, + "acc_norm": 0.29850746268656714, + "acc_norm_stderr": 0.032357437893550424 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508283, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508283 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.03680350371286462, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 
0.03680350371286462 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2808641975308642, + "acc_stderr": 0.02500646975579921, + "acc_norm": 0.2808641975308642, + "acc_norm_stderr": 0.02500646975579921 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916649, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916649 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28623853211009176, + "acc_stderr": 0.019379436628919968, + "acc_norm": 0.28623853211009176, + "acc_norm_stderr": 0.019379436628919968 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23366013071895425, + "acc_stderr": 0.017119158496044506, + "acc_norm": 0.23366013071895425, + "acc_norm_stderr": 0.017119158496044506 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140245, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140245 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467761, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467761 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966358, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966358 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.031362502409358915, + "acc_norm": 0.4, + "acc_norm_stderr": 0.031362502409358915 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.02904133351059804, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.02904133351059804 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27835723598435463, + "acc_stderr": 0.011446990197380984, + "acc_norm": 0.27835723598435463, + "acc_norm_stderr": 0.011446990197380984 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.01517698502770768, + "mc2": 0.41091136339297607, + "mc2_stderr": 0.014831976469805178 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3270365997638725, + "acc_stderr": 0.016129047485457022, + "acc_norm": 0.40968122786304606, + "acc_norm_stderr": 0.01690756819221947 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DILAB-HYU/koquality-ko-ref-llama2-7b", + "model_sha": "3ef89d06e678a10cd678b2f0258d0f4a0ef2b5bb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DILAB-HYU/koquality-polyglot-1.3b/result_2023-10-30 14:22:39.json b/DILAB-HYU/koquality-polyglot-1.3b/result_2023-10-30 14:22:39.json new file mode 100644 index 0000000000000000000000000000000000000000..84d4526e803ff7a428d9b968808e7a9131880576 --- /dev/null +++ b/DILAB-HYU/koquality-polyglot-1.3b/result_2023-10-30 14:22:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2363481228668942, + "acc_stderr": 0.012414960524301823, + "acc_norm": 0.28924914675767915, + "acc_norm_stderr": 0.013250012579393443 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3379804819757021, + "acc_stderr": 0.004720551323547122, + "acc_norm": 0.4183429595698068, + "acc_norm_stderr": 0.004922789247319879 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.041858325989283136, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.041858325989283136 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2681992337164751, + "acc_stderr": 0.01584243083526944, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.01584243083526944 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.035914440841969694, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.035914440841969694 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.028020226271200217, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.028020226271200217 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21084337349397592, + "acc_stderr": 0.0317555478662992, + "acc_norm": 0.21084337349397592, + "acc_norm_stderr": 0.0317555478662992 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2572347266881029, + "acc_stderr": 0.024826171289250888, + "acc_norm": 0.2572347266881029, + "acc_norm_stderr": 0.024826171289250888 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22137404580152673, + "acc_stderr": 0.0364129708131373, + "acc_norm": 0.22137404580152673, + "acc_norm_stderr": 0.0364129708131373 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124498, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 
0.030746300742124498 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.03095663632856654, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.03095663632856654 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132354, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132354 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678243, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678243 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029258, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029258 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21367521367521367, + "acc_stderr": 0.02685345037700916, + "acc_norm": 0.21367521367521367, + "acc_norm_stderr": 0.02685345037700916 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.0430911870994646, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.0430911870994646 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.029705284056772432, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772432 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.02447722285613511, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.02447722285613511 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21834862385321102, + "acc_stderr": 0.017712600528722734, + "acc_norm": 0.21834862385321102, + "acc_norm_stderr": 0.017712600528722734 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102149, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102149 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.02367908986180772, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.02367908986180772 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591206, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591206 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810537, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810537 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2107843137254902, + "acc_stderr": 0.01650047297902479, + "acc_norm": 0.2107843137254902, + "acc_norm_stderr": 0.01650047297902479 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3673469387755102, + "acc_stderr": 0.03086214492108755, + "acc_norm": 0.3673469387755102, + "acc_norm_stderr": 0.03086214492108755 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24472573839662448, + "acc_stderr": 0.027985699387036423, + "acc_norm": 0.24472573839662448, + "acc_norm_stderr": 0.027985699387036423 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178472, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178472 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693257, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693257 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.0315841532404771, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.0315841532404771 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.41348688566296676, + "mc2_stderr": 0.015238831556708764 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3293978748524203, + "acc_stderr": 0.016158746868147143, + "acc_norm": 0.40731995277449823, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DILAB-HYU/koquality-polyglot-1.3b", + "model_sha": "ca9ba27cccf4065cf447f9fdd7d5aec1715a3175", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DILAB-HYU/koquality-polyglot-12.8b/result_2023-11-12 20:22:29.json b/DILAB-HYU/koquality-polyglot-12.8b/result_2023-11-12 20:22:29.json new file mode 100644 index 0000000000000000000000000000000000000000..039ee932e9ee8d05f5a93fe9158f92163fd097af --- /dev/null +++ b/DILAB-HYU/koquality-polyglot-12.8b/result_2023-11-12 20:22:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2986348122866894, + "acc_stderr": 0.013374078615068759, + "acc_norm": 0.35494880546075086, + "acc_norm_stderr": 0.013983036904094095 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4047998406691894, + "acc_stderr": 0.004898501014225842, + "acc_norm": 0.5283808006373233, + "acc_norm_stderr": 0.004981736689518753 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.03446296217088426, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.03446296217088426 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27458492975734355, + "acc_stderr": 0.015959829933084032, + "acc_norm": 0.27458492975734355, + "acc_norm_stderr": 0.015959829933084032 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18674698795180722, + "acc_stderr": 0.03033874914450058, + "acc_norm": 0.18674698795180722, + "acc_norm_stderr": 0.03033874914450058 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 0.026236965881153266, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.026236965881153266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 
0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.027205371538279472, + "acc_norm": 0.226890756302521, + "acc_norm_stderr": 0.027205371538279472 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.02458002892148101, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.02458002892148101 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02934311479809447, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02934311479809447 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.02560423347089911, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.02560423347089911 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072774, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072774 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804725, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804725 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.03456425745087, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 
0.03456425745087 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.023135287974325618, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.023135287974325618 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.038270523579507554, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.038270523579507554 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.02525117393649502, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.02525117393649502 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.01822407811729906, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.01822407811729906 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.025261691219729487, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.025261691219729487 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137283, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137283 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432403, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + 
"acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608044, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409162, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409162 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335317, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235922, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235922 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.1940928270042194, + "acc_stderr": 0.025744902532290927, + "acc_norm": 0.1940928270042194, + "acc_norm_stderr": 0.025744902532290927 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2542372881355932, + "acc_stderr": 0.011121129007840673, + "acc_norm": 0.2542372881355932, + "acc_norm_stderr": 0.011121129007840673 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041866, + "mc2": 0.3991183406834575, + "mc2_stderr": 0.01489709315395438 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3305785123966942, + "acc_stderr": 0.0161734232988457, + "acc_norm": 0.3837072018890201, + "acc_norm_stderr": 0.016718924637231826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DILAB-HYU/koquality-polyglot-12.8b", + "model_sha": "7cca798e18e44cebbde1c6d1f59162882c2bf254", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DILAB-HYU/koquality-polyglot-3.8b/result_2023-10-30 14:22:50.json b/DILAB-HYU/koquality-polyglot-3.8b/result_2023-10-30 14:22:50.json new file mode 100644 index 0000000000000000000000000000000000000000..4dd3e85c00130ec067da9ec58e55fcdcd9329997 --- /dev/null +++ b/DILAB-HYU/koquality-polyglot-3.8b/result_2023-10-30 14:22:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26109215017064846, + "acc_stderr": 0.012835523909473855, + "acc_norm": 0.3097269624573379, + "acc_norm_stderr": 0.013512058415238361 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36606253734315874, + "acc_stderr": 0.00480742334322458, + "acc_norm": 0.46016729735112527, + "acc_norm_stderr": 0.004973922192982238 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785138, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785138 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.027321078417387533, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.027321078417387533 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.02608270069539966, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.02608270069539966 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969174, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969174 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.030283995525884403, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.030283995525884403 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.023060438380857737, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.023060438380857737 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.025284416114900156, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.025284416114900156 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.18376068376068377, + "acc_stderr": 0.02537213967172293, + "acc_norm": 0.18376068376068377, + "acc_norm_stderr": 0.02537213967172293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899105, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724136, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724136 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.030769444967296014, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.030769444967296014 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.033917503223216586, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.033917503223216586 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.25722543352601157, + "acc_stderr": 0.023532925431044287, + "acc_norm": 0.25722543352601157, + "acc_norm_stderr": 0.023532925431044287 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.025630824975621344, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.025630824975621344 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22752293577981653, + "acc_stderr": 0.0179744635787765, + "acc_norm": 0.22752293577981653, + "acc_norm_stderr": 0.0179744635787765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998905, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 
0.016819028375736386, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736386 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484375, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484375 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24315514993481094, + "acc_stderr": 0.010956556654417362, + "acc_norm": 0.24315514993481094, + "acc_norm_stderr": 0.010956556654417362 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283335, + "mc2": 0.4152993218865631, + "mc2_stderr": 0.015196497707034719 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3116883116883117, + "acc_stderr": 0.015924567607358338, + "acc_norm": 0.39433293978748524, + "acc_norm_stderr": 0.016802090674893213 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DILAB-HYU/koquality-polyglot-3.8b", + "model_sha": "c07be8b24386d148dae0b95cf1beecfd5ce1b695", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DILAB-HYU/koquality-polyglot-ko-12.8b/result_2023-11-05 05:44:07.json b/DILAB-HYU/koquality-polyglot-ko-12.8b/result_2023-11-05 05:44:07.json new file mode 100644 index 0000000000000000000000000000000000000000..611d7db705f98a9c1303a8396c7886885e3211c7 --- /dev/null +++ b/DILAB-HYU/koquality-polyglot-ko-12.8b/result_2023-11-05 05:44:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3037542662116041, + "acc_stderr": 0.01343890918477875, + "acc_norm": 0.34044368600682595, + "acc_norm_stderr": 0.013847460518892978 + }, + "harness|ko_hellaswag|10": { + "acc": 0.394443337980482, + "acc_stderr": 0.004877319683639072, + "acc_norm": 0.5136427006572396, + "acc_norm_stderr": 0.004987923636628548 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.03446296217088426, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.03446296217088426 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26947637292464877, + "acc_stderr": 0.01586624307321507, + "acc_norm": 0.26947637292464877, + "acc_norm_stderr": 0.01586624307321507 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.02767845257821238, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.02767845257821238 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.23493975903614459, + "acc_stderr": 0.03300533186128922, + "acc_norm": 0.23493975903614459, + "acc_norm_stderr": 0.03300533186128922 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998482, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.02558306248998482 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270287, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270287 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993178, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993178 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02755361446786381, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02755361446786381 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2641025641025641, + "acc_stderr": 0.022352193737453268, + "acc_norm": 0.2641025641025641, + "acc_norm_stderr": 0.022352193737453268 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114475, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114475 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.024580028921481006, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.024580028921481006 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21367521367521367, + "acc_stderr": 0.026853450377009164, + "acc_norm": 0.21367521367521367, + "acc_norm_stderr": 0.026853450377009164 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.28679245283018867, + "acc_stderr": 0.027834912527544057, + "acc_norm": 0.28679245283018867, + "acc_norm_stderr": 0.027834912527544057 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655078, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655078 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935555, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935555 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776564, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776564 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.038270523579507554, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.038270523579507554 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.025251173936495026, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.025251173936495026 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27522935779816515, + "acc_stderr": 0.0191490937431552, + "acc_norm": 0.27522935779816515, + "acc_norm_stderr": 0.0191490937431552 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790607, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790607 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.025058503316958157, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.025058503316958157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + 
"acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290392, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290392 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.01431099954796146, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.01431099954796146 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.30612244897959184, + "acc_stderr": 0.02950489645459596, + "acc_norm": 0.30612244897959184, + "acc_norm_stderr": 0.02950489645459596 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083291, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083291 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.01532182168847619, + "mc2": 0.4062486938859843, + "mc2_stderr": 0.014871974864786166 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31641086186540734, + "acc_stderr": 0.015989617951065477, + "acc_norm": 0.3778040141676505, + "acc_norm_stderr": 0.016669082840694963 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DILAB-HYU/koquality-polyglot-ko-12.8b", + "model_sha": "8db9d0a47a6dc69b8fd405f4053c723a4c54696a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKA-DPO-test-v1/result_2023-11-08 09:27:44.json b/DopeorNope/COKA-DPO-test-v1/result_2023-11-08 09:27:44.json new file mode 100644 index 0000000000000000000000000000000000000000..b869087ecd73a1b44765bd3e0095ac5cd97b8ed3 --- /dev/null +++ b/DopeorNope/COKA-DPO-test-v1/result_2023-11-08 09:27:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.014383915302225396, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.014575583922019662 + }, + "harness|ko_hellaswag|10": { + "acc": 0.475502887870942, + "acc_stderr": 0.0049837889926811945, + "acc_norm": 0.5488946425014938, + "acc_norm_stderr": 0.004965866098318169 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36015325670498083, + "acc_stderr": 0.017166362471369302, + "acc_norm": 0.36015325670498083, + "acc_norm_stderr": 0.017166362471369302 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.037498507091740206, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.037498507091740206 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628837, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628837 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511116, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511116 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 0.026236965881153262, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.026236965881153262 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134986, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134986 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.0320877955878675, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.0320877955878675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.03695183311650232, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.03695183311650232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3025210084033613, + "acc_stderr": 0.02983796238829193, + "acc_norm": 0.3025210084033613, + "acc_norm_stderr": 0.02983796238829193 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896597, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896597 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030049, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.04524596007030049 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.17733990147783252, + "acc_stderr": 0.026874337276808345, + "acc_norm": 
0.17733990147783252, + "acc_norm_stderr": 0.026874337276808345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.024580028921481003, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.024580028921481003 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.03271298896811159, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.03271298896811159 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118352, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118352 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910507, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.025040443877000683, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.025040443877000683 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.032578473844367746, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.032578473844367746 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.38308457711442784, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.38308457711442784, + "acc_norm_stderr": 0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031722, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031722 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.024257901705323374, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.024257901705323374 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886345, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886345 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.31088082901554404, + "acc_stderr": 0.03340361906276586, + "acc_norm": 0.31088082901554404, + "acc_norm_stderr": 0.03340361906276586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28073394495412846, + "acc_stderr": 0.019266055045871616, + "acc_norm": 
0.28073394495412846, + "acc_norm_stderr": 0.019266055045871616 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818726, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818726 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.033550453048829226, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.033550453048829226 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.018152871051538816, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.018152871051538816 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642966, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642966 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285714, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18055555555555555, + "acc_stderr": 0.02623287897149166, + "acc_norm": 0.18055555555555555, + "acc_norm_stderr": 0.02623287897149166 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19117647058823528, + "acc_stderr": 0.023886881922440355, + "acc_norm": 0.19117647058823528, + "acc_norm_stderr": 0.023886881922440355 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.02688214492230774, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.02688214492230774 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35443037974683544, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.35443037974683544, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2653194263363755, + "acc_stderr": 0.011276198843958855, + "acc_norm": 0.2653194263363755, + "acc_norm_stderr": 0.011276198843958855 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 
0.01494881267906214, + "mc2": 0.38215532822863674, + "mc2_stderr": 0.016573212306306796 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28689492325855964, + "acc_stderr": 0.015550809966781775, + "acc_norm": 0.3955135773317591, + "acc_norm_stderr": 0.01681081590220604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKA-DPO-test-v1", + "model_sha": "3cb9d8b6049bcf966d2bf418661c5b7228795949", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKAL-13b-v3/result_2023-10-28 20:08:45.json b/DopeorNope/COKAL-13b-v3/result_2023-10-28 20:08:45.json new file mode 100644 index 0000000000000000000000000000000000000000..8abd948ebfe1fec79e781da4981952adc0daa4b0 --- /dev/null +++ 
b/DopeorNope/COKAL-13b-v3/result_2023-10-28 20:08:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3575085324232082, + "acc_stderr": 0.014005494275916576, + "acc_norm": 0.4189419795221843, + "acc_norm_stderr": 0.014418106953639011 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40300736904999, + "acc_stderr": 0.004894997736719058, + "acc_norm": 0.5230033857797252, + "acc_norm_stderr": 0.004984497871025246 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5031928480204342, + "acc_stderr": 0.017879598945933085, + "acc_norm": 0.5031928480204342, + "acc_norm_stderr": 0.017879598945933085 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197598, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.032061837832361516, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.032061837832361516 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042328, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042328 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 
0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194974, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194974 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413324, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859923, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859923 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483924, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.03195514741370672, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.03195514741370672 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36897001303780963, + "acc_stderr": 0.012323936650174857, + "acc_norm": 0.36897001303780963, + "acc_norm_stderr": 0.012323936650174857 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766368, + "mc2": 0.4258256217247513, + "mc2_stderr": 0.01532948017384573 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3707201889020071, + "acc_stderr": 0.016605801289212605, + "acc_norm": 0.42621015348288077, + "acc_norm_stderr": 0.01700212260948926 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKAL-13b-v3", + "model_sha": 
"fb5391a7f4dbc9677819d9e5fd98f1685173a0e8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKAL-DPO_test-v2/result_2023-11-10 08:12:00.json b/DopeorNope/COKAL-DPO_test-v2/result_2023-11-10 08:12:00.json new file mode 100644 index 0000000000000000000000000000000000000000..74bfc21694534b3a81ebdd41f626d55745138651 --- /dev/null +++ b/DopeorNope/COKAL-DPO_test-v2/result_2023-11-10 08:12:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.507679180887372, + "acc_stderr": 0.01460966744089257, + "acc_norm": 0.5563139931740614, + "acc_norm_stderr": 0.014518421825670435 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5033857797251543, + "acc_stderr": 0.004989667009372646, + "acc_norm": 0.6350328619796853, + "acc_norm_stderr": 0.004804370563856224 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.01772458938967779, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.01772458938967779 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028337, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028337 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684973, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684973 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101813, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101813 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 
0.03922378290610991, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259287, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271775, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271775 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468633, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468633 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933102, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + 
"acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36114732724902215, + "acc_stderr": 0.01226793547751903, + "acc_norm": 0.36114732724902215, + "acc_norm_stderr": 0.01226793547751903 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3684210526315789, + "mc1_stderr": 0.016886551261046046, + "mc2": 0.5150391905534241, + "mc2_stderr": 0.016421569953399714 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4734356552538371, + "acc_stderr": 0.017166075717577747, + "acc_norm": 0.4923258559622196, + "acc_norm_stderr": 0.017188329219654276 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKAL-DPO_test-v2", + "model_sha": "effd7432a8cca9d0bb7b30cf9d093d004ed39616", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKALD-13B-v2/result_2023-11-05 16:40:51.json b/DopeorNope/COKALD-13B-v2/result_2023-11-05 16:40:51.json new file mode 100644 index 0000000000000000000000000000000000000000..02876c89813aee65cf3c4f0438a10b080987e063 --- /dev/null +++ b/DopeorNope/COKALD-13B-v2/result_2023-11-05 16:40:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40784982935153585, + "acc_stderr": 0.014361097288449708, + "acc_norm": 0.4778156996587031, + "acc_norm_stderr": 0.014597001927076133 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4287990440151364, + "acc_stderr": 0.004938930143234453, + "acc_norm": 0.574088826926907, + "acc_norm_stderr": 0.004934698012050241 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5632183908045977, + "acc_stderr": 0.017736470837800694, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.017736470837800694 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.034812853382329645, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 
0.034812853382329645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 
0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.02082814851702261, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.02082814851702261 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.02830457667314111, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.02830457667314111 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { 
+ "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714854, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714854 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301854, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301854 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35853976531942633, + "acc_stderr": 0.012248487319682751, + "acc_norm": 0.35853976531942633, + "acc_norm_stderr": 0.012248487319682751 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559696, + "mc2": 0.4712352722064192, + "mc2_stderr": 0.015376328355595536 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKALD-13B-v2", + "model_sha": "3e6e8e2882890e69078d236891f9212a5b9d7a50", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKALL-13B-v1/result_2023-11-01 07:29:19.json b/DopeorNope/COKALL-13B-v1/result_2023-11-01 07:29:19.json new file mode 100644 index 0000000000000000000000000000000000000000..2f74e7d84287d4cced3ad197e10240eaa22e9a05 --- /dev/null +++ b/DopeorNope/COKALL-13B-v1/result_2023-11-01 07:29:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4052901023890785, + "acc_stderr": 0.014346869060229327, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007105 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4251145190201155, + "acc_stderr": 0.004933500261683595, + "acc_norm": 0.5697072296355308, + "acc_norm_stderr": 0.004941051795214796 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.547085201793722, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.547085201793722, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 
0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126167, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.031660988918880785, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.031660988918880785 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.0344578996436275, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.0344578996436275 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 
0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5889908256880734, + "acc_stderr": 0.021095050687277656, + "acc_norm": 0.5889908256880734, + "acc_norm_stderr": 0.021095050687277656 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.02811092849280908, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.02811092849280908 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094607, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094607 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293647, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293647 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.03018753206032938, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.03018753206032938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741518, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.01539211880501501, + "mc2": 0.42109130378367604, + "mc2_stderr": 0.014773304652161631 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5667060212514758, + "acc_norm_stderr": 0.017036683641893098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKALL-13B-v1", + "model_sha": "a18441a1da10e5c23877d12fb89bde30d4c108c9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKALL-13B-v2/result_2023-11-01 15:07:31.json b/DopeorNope/COKALL-13B-v2/result_2023-11-01 15:07:31.json new file mode 100644 index 0000000000000000000000000000000000000000..06547c3701c4621eec2fa43100c186f32a2bb16d --- /dev/null +++ b/DopeorNope/COKALL-13B-v2/result_2023-11-01 15:07:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4069965870307167, + "acc_stderr": 0.01435639941800912, + "acc_norm": 0.4684300341296928, + "acc_norm_stderr": 0.014582236460866977 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42471619199362676, + "acc_stderr": 0.004932896472460567, + "acc_norm": 0.569308902609042, + "acc_norm_stderr": 0.004941609820763586 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5632183908045977, + "acc_stderr": 0.017736470837800698, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.017736470837800698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649038, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649038 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261117, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261117 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6036697247706422, + "acc_stderr": 0.020971469947900525, + "acc_norm": 0.6036697247706422, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.019610851474880286, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.019610851474880286 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190714, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596445, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301857, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301857 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.012223623364044046, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.012223623364044046 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834557, + "mc2": 0.4229553020954532, + "mc2_stderr": 0.01482225107189349 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.01717730199234255, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKALL-13B-v2", + "model_sha": "5aa12e623e32ecb5d455cc2b6ce9c1f2b597c19f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKALL-13B-v3/result_2023-11-02 03:38:01.json b/DopeorNope/COKALL-13B-v3/result_2023-11-02 03:38:01.json new file mode 100644 index 0000000000000000000000000000000000000000..6d2d2d209480ec1d8acf68bc8cda287e1fe8dea7 --- /dev/null +++ b/DopeorNope/COKALL-13B-v3/result_2023-11-02 03:38:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4061433447098976, + "acc_stderr": 0.014351656690097858, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007105 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4255128460466043, + "acc_stderr": 0.004934100774481221, + "acc_norm": 0.5705038836885082, + "acc_norm_stderr": 0.004939925958728879 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5644955300127714, + "acc_stderr": 0.01773058992792658, + "acc_norm": 0.5644955300127714, + "acc_norm_stderr": 0.01773058992792658 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { 
+ "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.032321469162244695, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.032321469162244695 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962956, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650776, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650776 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 
0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959912, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959912 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6036697247706422, + "acc_stderr": 0.020971469947900525, + "acc_norm": 0.6036697247706422, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.02807415894760065, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760065 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296559, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296559 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.01973700899809461, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.01973700899809461 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042405, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190714, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.012117939998705878, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.012117939998705878 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237265, + "mc2": 0.423972388627194, + "mc2_stderr": 0.014812434868238748 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.500590318772137, + "acc_stderr": 0.017190342123448586, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKALL-13B-v3", + "model_sha": "f894f6dfec6757a7fca3876b4a67e7112a136427", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKALL-13B-v4/result_2023-11-02 05:53:28.json b/DopeorNope/COKALL-13B-v4/result_2023-11-02 05:53:28.json new file mode 100644 index 0000000000000000000000000000000000000000..47166bdba4b6ae66b3a460568d738e8ab5d59b06 --- /dev/null +++ b/DopeorNope/COKALL-13B-v4/result_2023-11-02 05:53:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4061433447098976, + "acc_stderr": 0.014351656690097858, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007105 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4251145190201155, + "acc_stderr": 0.004933500261683595, + "acc_norm": 0.569806811392153, + "acc_norm_stderr": 0.0049409117792733786 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 
0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5644955300127714, + "acc_stderr": 0.017730589927926584, + "acc_norm": 0.5644955300127714, + "acc_norm_stderr": 0.017730589927926584 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126167, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.0483036602463533, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.0483036602463533 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + 
"acc": 0.6282051282051282, + "acc_stderr": 0.031660988918880785, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.031660988918880785 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.02110912813341391, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.02110912813341391 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + 
"acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293647, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293647 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35267275097783574, + "acc_stderr": 0.012203286846053887, + "acc_norm": 0.35267275097783574, + "acc_norm_stderr": 0.012203286846053887 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015008, + "mc2": 0.42045074844260966, + "mc2_stderr": 0.014759134992850444 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998564, + "acc_norm": 0.5690672963400236, + "acc_norm_stderr": 0.01702555819604314 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKALL-13B-v4", + "model_sha": "ace3123d6b76b6794d5b796d2ef84a3f1f88ff94", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKAL_merged_test-v1-13B/result_2023-12-19 08:25:29.json b/DopeorNope/COKAL_merged_test-v1-13B/result_2023-12-19 08:25:29.json new file mode 100644 index 0000000000000000000000000000000000000000..1e01e2a5ca4052611530e2c2c66a91bfba123d5f --- /dev/null +++ b/DopeorNope/COKAL_merged_test-v1-13B/result_2023-12-19 08:25:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46075085324232085, + "acc_stderr": 0.014566303676636581, + "acc_norm": 0.514505119453925, + "acc_norm_stderr": 0.014605241081370053 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4541923919537941, + "acc_stderr": 0.004968796800410414, + "acc_norm": 
0.6054570802628958, + "acc_norm_stderr": 0.004877534215987093 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5670498084291188, + "acc_stderr": 0.017718469101513985, + "acc_norm": 0.5670498084291188, + "acc_norm_stderr": 0.017718469101513985 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562804, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562804 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.02528558599001783, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.02528558599001783 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + 
"acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681848, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681848 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.02380952380952385, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.02380952380952385 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.026788811931562767, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.026788811931562767 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.0278074900442762 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 
0.021136376504030874, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.021136376504030874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924318, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924318 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510467998, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510467998 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714847, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714847 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.012223623364044043, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.012223623364044043 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.038254602783800266, + "acc_norm": 0.6, + "acc_norm_stderr": 0.038254602783800266 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.01645126444006824, + "mc2": 0.4904973367131087, + 
"mc2_stderr": 0.015682971125946653 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5407319952774499, + "acc_stderr": 0.01713321827653767, + "acc_norm": 0.577331759149941, + "acc_norm_stderr": 0.016983506079577604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKAL_merged_test-v1-13B", + "model_sha": "4164e460dbf37491becf4f987dedaa0628cadbdd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKAL_pre_DPO_Test_v1-13b/result_2023-11-10 04:48:39.json b/DopeorNope/COKAL_pre_DPO_Test_v1-13b/result_2023-11-10 04:48:39.json new file mode 100644 index 0000000000000000000000000000000000000000..e27bcd456846e9f8be4496f4f048843533736ce2 --- /dev/null +++ 
b/DopeorNope/COKAL_pre_DPO_Test_v1-13b/result_2023-11-10 04:48:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4206484641638225, + "acc_stderr": 0.014426211252508406, + "acc_norm": 0.4786689419795222, + "acc_norm_stderr": 0.014598087973127104 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4311890061740689, + "acc_stderr": 0.004942302768002103, + "acc_norm": 0.5746863174666401, + "acc_norm_stderr": 0.004933800927560538 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.01775880053421441, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.01775880053421441 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232964, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232964 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + 
"acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413866, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413866 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833935, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833935 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6311926605504588, + "acc_stderr": 0.020686227560729534, + "acc_norm": 0.6311926605504588, + "acc_norm_stderr": 0.020686227560729534 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.039105257528497264, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.039105257528497264 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.019524316744866342, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.019524316744866342 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.031137304297185805, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.031137304297185805 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3533246414602347, + "acc_stderr": 0.012208408211082425, + "acc_norm": 0.3533246414602347, + "acc_norm_stderr": 
0.012208408211082425 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882462, + "mc2": 0.4661645299206862, + "mc2_stderr": 0.015362128030709755 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47107438016528924, + "acc_stderr": 0.017161563949916345, + "acc_norm": 0.5230224321133412, + "acc_norm_stderr": 0.017172121546727634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKAL_pre_DPO_Test_v1-13b", + 
"model_sha": "bb7af9fcf945355418b9457538f30f0fd41b5aac", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKAL_pre_DPO_Test_v2-13b/result_2023-11-11 06:26:11.json b/DopeorNope/COKAL_pre_DPO_Test_v2-13b/result_2023-11-11 06:26:11.json new file mode 100644 index 0000000000000000000000000000000000000000..e6584845da85dd0faf2b1cb1373964286d58e03f --- /dev/null +++ b/DopeorNope/COKAL_pre_DPO_Test_v2-13b/result_2023-11-11 06:26:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41638225255972694, + "acc_stderr": 0.01440561827943618, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.014600132075947096 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43158733320055764, + "acc_stderr": 0.004942853459371549, + "acc_norm": 0.5758812985461064, + "acc_norm_stderr": 0.004931984642695335 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.01772458938967779, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.01772458938967779 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542124, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542124 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.0348890161685273, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.0348890161685273 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 
+ }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986483, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883231, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883231 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844058, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844058 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + 
"acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6165137614678899, + "acc_stderr": 0.020847156641915984, + "acc_norm": 0.6165137614678899, + "acc_norm_stderr": 0.020847156641915984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.02838425670488304, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.02838425670488304 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.019610851474880286, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.019610851474880286 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755805, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755805 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647206, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647206 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.0303720158854282, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.0303720158854282 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + 
"acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35658409387222945, + "acc_stderr": 0.012233642989273888, + "acc_norm": 0.35658409387222945, + "acc_norm_stderr": 0.012233642989273888 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.01605899902610062, + "mc2": 0.46120733649464474, + "mc2_stderr": 0.015282924396450131 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.01710761885954935, + "acc_norm": 0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKAL_pre_DPO_Test_v2-13b", + "model_sha": "e2fb97d3ef746540a5900ad1e19250dd74dc429f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COLA3-7B/result_2023-10-03 08:35:59.json b/DopeorNope/COLA3-7B/result_2023-10-03 08:35:59.json new file mode 100644 index 0000000000000000000000000000000000000000..50bd11017fdd1d0b7bb08b75a748f636ce3c91b8 --- /dev/null +++ b/DopeorNope/COLA3-7B/result_2023-10-03 08:35:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3310580204778157, + "acc_stderr": 0.013752062419817832, + "acc_norm": 0.3916382252559727, + "acc_norm_stderr": 0.014264122124938215 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3873730332603067, + "acc_stderr": 0.004861544478451855, + "acc_norm": 0.5097590121489743, + "acc_norm_stderr": 0.004988830884131634 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3895274584929757, + "acc_stderr": 0.017438082556264594, + "acc_norm": 0.3895274584929757, + "acc_norm_stderr": 0.017438082556264594 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.029644006577009618, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.029644006577009618 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40192926045016075, + "acc_stderr": 0.027846476005930477, + "acc_norm": 0.40192926045016075, + "acc_norm_stderr": 0.027846476005930477 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.43434343434343436, + "acc_stderr": 0.035315058793591834, + "acc_norm": 0.43434343434343436, 
+ "acc_norm_stderr": 0.035315058793591834 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.02311936275823229, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.02311936275823229 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.047323326159788126, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.047323326159788126 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534327, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534327 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.48717948717948717, + "acc_stderr": 0.032745319388423504, + "acc_norm": 0.48717948717948717, + "acc_norm_stderr": 0.032745319388423504 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33584905660377357, + "acc_stderr": 0.029067220146644823, + "acc_norm": 0.33584905660377357, + "acc_norm_stderr": 0.029067220146644823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.0472457740573157, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.0472457740573157 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.46766169154228854, + "acc_stderr": 0.035281314729336065, + "acc_norm": 0.46766169154228854, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, 
+ "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.407514450867052, + "acc_stderr": 0.026454578146931505, + "acc_norm": 0.407514450867052, + "acc_norm_stderr": 0.026454578146931505 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.03680350371286461, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.03680350371286461 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.02716368603827123, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.02716368603827123 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42018348623853213, + "acc_stderr": 0.021162420048273508, + "acc_norm": 0.42018348623853213, + "acc_norm_stderr": 0.021162420048273508 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119667, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119667 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706214, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706214 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553977, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553977 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 
+ }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.01201414210184297, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.01201414210184297 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.033744993563193555, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.033744993563193555 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.01494881267906214, + "mc2": 0.3781293727977648, + "mc2_stderr": 0.014917319628125631 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21133412042502953, + "acc_stderr": 0.01403609034293031, + "acc_norm": 0.3022432113341204, + "acc_norm_stderr": 0.01578865486302237 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COLA3-7B", + "model_sha": "90a961edc95e63c6b777402191b76fbfa3ed3a8d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COLA3_13B/result_2023-10-05 10:17:21.json b/DopeorNope/COLA3_13B/result_2023-10-05 10:17:21.json new file mode 100644 index 0000000000000000000000000000000000000000..a9e122a6ed3818585ed3a3421f5d97640e497e90 --- /dev/null +++ b/DopeorNope/COLA3_13B/result_2023-10-05 10:17:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042192, + "acc_norm": 0.42235494880546076, + "acc_norm_stderr": 0.014434138713379981 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4082852021509659, + "acc_stderr": 0.004905119039849461, + "acc_norm": 0.5435172276438957, + "acc_norm_stderr": 0.004970846697552308 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5057471264367817, + "acc_stderr": 0.017878782326129224, + "acc_norm": 0.5057471264367817, + "acc_norm_stderr": 0.017878782326129224 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.03078373675774564, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.03078373675774564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929187, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929187 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566197, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566197 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.02475600038213094, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.02475600038213094 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.04760548821460325, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.04760548821460325 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.026788811931562757, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.026788811931562757 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144807, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48256880733944957, + "acc_stderr": 0.021424291871853147, + "acc_norm": 0.48256880733944957, + "acc_norm_stderr": 0.021424291871853147 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604674, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604674 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.02830457667314112, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.02830457667314112 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093085, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093085 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516994, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.026040662474201285, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.026040662474201285 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.03254462010767859, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.03254462010767859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30247718383311606, + "acc_stderr": 0.0117315242341657, + "acc_norm": 0.30247718383311606, + "acc_norm_stderr": 0.0117315242341657 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826824, + "mc2": 0.40933802446057865, + "mc2_stderr": 0.014937193336867839 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4557260920897285, + "acc_stderr": 0.017122829143292648, + "acc_norm": 0.5147579693034239, + "acc_norm_stderr": 0.01718286443499856 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COLA3_13B", + "model_sha": "7725e7a1c6f8f022c7c4ec0286dd9f7fada126bd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COLA_LO-7B/result_2023-10-03 17:04:14.json b/DopeorNope/COLA_LO-7B/result_2023-10-03 17:04:14.json new file mode 100644 index 0000000000000000000000000000000000000000..549d48ae36153fef33571db99b3abc62ef3f68ac --- /dev/null +++ b/DopeorNope/COLA_LO-7B/result_2023-10-03 17:04:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3319112627986348, + "acc_stderr": 0.013760988200880533, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892884 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3857797251543517, + "acc_stderr": 0.004857840934549158, + "acc_norm": 0.5046803425612428, + "acc_norm_stderr": 0.004989562798280523 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.04689765937278134, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.04689765937278134 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41762452107279696, + "acc_stderr": 0.01763563732695152, + "acc_norm": 0.41762452107279696, + "acc_norm_stderr": 0.01763563732695152 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.02977164271249123, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.02977164271249123 + }, + "harness|ko_mmlu_virology|5": { + 
"acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41414141414141414, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.41414141414141414, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204433, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204433 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144444, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144444 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.02743086657997347, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.02743086657997347 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5042735042735043, + "acc_stderr": 0.03275489264382132, + "acc_norm": 0.5042735042735043, + "acc_norm_stderr": 0.03275489264382132 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33962264150943394, + "acc_stderr": 0.029146904747798352, + "acc_norm": 0.33962264150943394, + "acc_norm_stderr": 0.029146904747798352 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.046313813194254635, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.046313813194254635 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.23178807947019867, + "acc_stderr": 0.03445406271987054, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987054 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43283582089552236, + "acc_stderr": 0.0350349092367328, + "acc_norm": 0.43283582089552236, + "acc_norm_stderr": 0.0350349092367328 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.03496101481191181, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.03496101481191181 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2328042328042328, + "acc_stderr": 0.021765961672154537, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.021765961672154537 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.02681771813034892, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.02681771813034892 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.02716368603827123, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.02716368603827123 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3798165137614679, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.3798165137614679, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924318, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924318 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.48760330578512395, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119667, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119667 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093095, + 
"acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093095 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320203, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320203 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03004261583271486, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03004261583271486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.42616033755274263, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.42616033755274263, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.012043812655846146, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.012043812655846146 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.033744993563193555, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.033744993563193555 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520688, + "mc2": 0.3821911392219441, + "mc2_stderr": 0.014928316371274168 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21959858323494688, + "acc_stderr": 0.014232743085580275, + "acc_norm": 0.29634002361275086, + "acc_norm_stderr": 0.015699701628594232 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COLA_LO-7B", + "model_sha": "4cccb5249ea36f58588c32fe58c6f104f89f0487", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Dear_My_best_Friend-SFT-v2-13B/result_2023-11-25 04:59:10.json b/DopeorNope/Dear_My_best_Friend-SFT-v2-13B/result_2023-11-25 04:59:10.json new file mode 100644 index 0000000000000000000000000000000000000000..729986bb788de366337e28504f4e0e6bb63c2f1f --- /dev/null +++ b/DopeorNope/Dear_My_best_Friend-SFT-v2-13B/result_2023-11-25 04:59:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4138225255972696, + "acc_stderr": 0.014392730009221009, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.014600132075947098 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4291973710416252, + "acc_stderr": 0.004939500404882179, + "acc_norm": 0.5743875721967735, + "acc_norm_stderr": 0.004934250390879774 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394223, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394223 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.02518914989476419, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.02518914989476419 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + 
"acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655816, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655816 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.020748959408988334, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.020748959408988334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215923, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215923 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251458, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34876140808344197, + "acc_stderr": 0.012172035157127115, + "acc_norm": 0.34876140808344197, + "acc_norm_stderr": 0.012172035157127115 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394816, + "mc2": 0.46238471252084135, + "mc2_stderr": 0.015296846959143042 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46635182998819363, + "acc_stderr": 0.017151384117131865, + "acc_norm": 0.5324675324675324, + "acc_norm_stderr": 0.017154073716682868 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 
1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Dear_My_best_Friend-SFT-v2-13B", + "model_sha": "ef7f609ba5694a3740f8a95e1c7699a1d42abb1f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Dear_My_best_Friends-13B/result_2023-11-12 16:14:26.json b/DopeorNope/Dear_My_best_Friends-13B/result_2023-11-12 16:14:26.json new file mode 100644 index 0000000000000000000000000000000000000000..cb4e8806a2c21d90ea31d8404855b6c0ef7e553d --- /dev/null +++ b/DopeorNope/Dear_My_best_Friends-13B/result_2023-11-12 16:14:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47440273037542663, + "acc_stderr": 0.014592230885298964, + "acc_norm": 0.5170648464163823, + "acc_norm_stderr": 0.014602878388536595 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45956980681139215, + "acc_stderr": 0.004973442060741618, + "acc_norm": 0.5844453296156145, + "acc_norm_stderr": 0.0049181021687179334 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, 
+ "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4929757343550447, + "acc_stderr": 0.017878199003432214, + "acc_norm": 0.4929757343550447, + "acc_norm_stderr": 0.017878199003432214 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.03078373675774565, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.03078373675774565 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562807, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562807 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40336134453781514, + "acc_stderr": 0.03186608121408831, + "acc_norm": 0.40336134453781514, + "acc_norm_stderr": 0.03186608121408831 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.45483870967741935, + "acc_stderr": 0.028327743091561063, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881565, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.02813325257881565 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5024875621890548, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.5024875621890548, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.023330654054535886, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.023330654054535886 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 
0.04073524322147124, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147124 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259283, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259283 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.0193733324207245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.0193733324207245 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396587, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.031891418324213966, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.031891418324213966 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.011759939618085455, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085455 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156474, + "mc2": 0.38803930344769905, + "mc2_stderr": 0.016051378097776924 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5088547815820543, + "acc_stderr": 0.01718765819933674, + "acc_norm": 
0.5395513577331759, + "acc_norm_stderr": 0.01713648762604985 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Dear_My_best_Friends-13B", + "model_sha": "6daa83ba6ec2d97df05703f1cb6665eb68b11e0e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Dear_My_best_Friends-v2-13B/result_2023-11-25 04:58:26.json b/DopeorNope/Dear_My_best_Friends-v2-13B/result_2023-11-25 04:58:26.json new file mode 100644 index 0000000000000000000000000000000000000000..00e2119ea358ca7d24a9ee3d34d9508e45c5ceff --- /dev/null +++ b/DopeorNope/Dear_My_best_Friends-v2-13B/result_2023-11-25 04:58:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4761092150170648, + 
"acc_stderr": 0.014594701798071654, + "acc_norm": 0.5443686006825939, + "acc_norm_stderr": 0.01455374993930687 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4691296554471221, + "acc_stderr": 0.004980262025472487, + "acc_norm": 0.617307309300936, + "acc_norm_stderr": 0.004850508945116094 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.017758800534214407, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.017758800534214407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339525, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339525 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841587, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841587 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 
0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.039158572914369714, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.039158572914369714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 
0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6018348623853211, + "acc_stderr": 0.020987989422654254, + "acc_norm": 0.6018348623853211, + "acc_norm_stderr": 0.020987989422654254 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013317, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013317 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.019610851474880286, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.019610851474880286 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261445, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261445 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.03141470802586589, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.03141470802586589 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3604954367666232, + "acc_stderr": 0.012263110237299233, + "acc_norm": 0.3604954367666232, + "acc_norm_stderr": 0.012263110237299233 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 
0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.038348163554011806, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.038348163554011806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3317013463892289, + "mc1_stderr": 0.016482148810241463, + "mc2": 0.47986111470028925, + "mc2_stderr": 0.01605545610064073 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998564, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.01718027524608563 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Dear_My_best_Friends-v2-13B", + "model_sha": "89fadbe4d9c022448dd86b2405043887561cf791", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + 
"max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/KOAT-5.8b/result_2023-10-01 15:52:29.json b/DopeorNope/KOAT-5.8b/result_2023-10-01 15:52:29.json new file mode 100644 index 0000000000000000000000000000000000000000..8c9f1092e2ad00e70deff8ceb575581573e5a840 --- /dev/null +++ b/DopeorNope/KOAT-5.8b/result_2023-10-01 15:52:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25597269624573377, + "acc_stderr": 0.012753013241244513, + "acc_norm": 0.30716723549488056, + "acc_norm_stderr": 0.013481034054980945 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3408683529177455, + "acc_stderr": 0.00473032455662415, + "acc_norm": 0.4153555068711412, + "acc_norm_stderr": 0.004917761181740164 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.041858325989283136, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.041858325989283136 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21966794380587484, + "acc_stderr": 0.01480538447837116, + "acc_norm": 0.21966794380587484, + "acc_norm_stderr": 0.01480538447837116 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614866, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614866 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.22127659574468084, + "acc_stderr": 0.027136349602424063, + "acc_norm": 0.22127659574468084, + "acc_norm_stderr": 0.027136349602424063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944968, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944968 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.25112107623318386, + "acc_stderr": 0.029105220833224605, + "acc_norm": 0.25112107623318386, + "acc_norm_stderr": 0.029105220833224605 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23737373737373738, + "acc_stderr": 0.03031371053819888, + "acc_norm": 0.23737373737373738, + "acc_norm_stderr": 0.03031371053819888 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.034559302019248124, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.034559302019248124 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.13725490196078433, + "acc_stderr": 0.03424084669891521, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.03424084669891521 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.030176808288974337, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.030176808288974337 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2794871794871795, + "acc_stderr": 0.022752388839776826, + "acc_norm": 0.2794871794871795, + "acc_norm_stderr": 0.022752388839776826 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.13, + "acc_stderr": 0.033799766898963086, + "acc_norm": 0.13, + "acc_norm_stderr": 0.033799766898963086 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03755265865037181, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03755265865037181 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114485, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042767, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042767 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004264, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21132075471698114, + "acc_stderr": 0.02512576648482784, + "acc_norm": 0.21132075471698114, + "acc_norm_stderr": 0.02512576648482784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.0449429086625209, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.0449429086625209 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.030769444967296014, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.030769444967296014 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.033450369167889904, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.033450369167889904 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.02210112878741543, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.02210112878741543 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.27469135802469136, + "acc_stderr": 0.024836057868294677, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27461139896373055, + "acc_stderr": 0.03221024508041154, + "acc_norm": 0.27461139896373055, + "acc_norm_stderr": 0.03221024508041154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.018175110510343602, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.018175110510343602 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.025646863097137904, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.025646863097137904 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2066115702479339, + "acc_stderr": 0.03695980128098824, + "acc_norm": 0.2066115702479339, + "acc_norm_stderr": 0.03695980128098824 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.01672993756553755, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.01672993756553755 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902002, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902002 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.15178571428571427, + "acc_stderr": 0.034057028381856924, + "acc_norm": 0.15178571428571427, + "acc_norm_stderr": 0.034057028381856924 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.03038805130167812, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.03038805130167812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.014487500852850417, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850417 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.026799562024887685, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.026799562024887685 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.02671143055553839, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.02671143055553839 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.23628691983122363, + 
"acc_stderr": 0.027652153144159267, + "acc_norm": 0.23628691983122363, + "acc_norm_stderr": 0.027652153144159267 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2653194263363755, + "acc_stderr": 0.011276198843958873, + "acc_norm": 0.2653194263363755, + "acc_norm_stderr": 0.011276198843958873 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693268, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693268 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.01505186948671501, + "mc2": 0.41023662722679205, + "mc2_stderr": 0.016160843398647234 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30460448642266824, + "acc_stderr": 0.01582336727312939, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.016366945603281273 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/KOAT-5.8b", + "model_sha": "768c40d2ffbddbc8aa15eed33234eef248eb43e7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/SOLAR_C-v1-10.7B/result_2023-12-28 05:34:17.json b/DopeorNope/SOLAR_C-v1-10.7B/result_2023-12-28 05:34:17.json new file mode 100644 index 0000000000000000000000000000000000000000..7cc948bf80c33a049f89376eeeeebd75c11e6e47 --- /dev/null +++ b/DopeorNope/SOLAR_C-v1-10.7B/result_2023-12-28 05:34:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.0143839153022254, + "acc_norm": 0.47525597269624575, + "acc_norm_stderr": 0.014593487694937736 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2984465245966939, + "acc_stderr": 0.004566412808642454, + "acc_norm": 0.34475204142601074, + "acc_norm_stderr": 0.004743160034271155 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5134099616858238, + "acc_stderr": 0.017873531736510365, + "acc_norm": 0.5134099616858238, + "acc_norm_stderr": 0.017873531736510365 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 
0.04122737111370332, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + "acc_stderr": 0.028384747788813332, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.028384747788813332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131143, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131143 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 
0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756656, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756656 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104282, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104282 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.563302752293578, + "acc_stderr": 0.02126482015871421, + "acc_norm": 0.563302752293578, + "acc_norm_stderr": 0.02126482015871421 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.042943408452120926, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.042943408452120926 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4199346405228758, + "acc_stderr": 0.019966811178256483, + "acc_norm": 0.4199346405228758, + "acc_norm_stderr": 0.019966811178256483 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.293854748603352, + "acc_stderr": 0.015235075776719603, + "acc_norm": 0.293854748603352, + "acc_norm_stderr": 0.015235075776719603 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7130801687763713, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.7130801687763713, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.378748370273794, + "acc_stderr": 0.01238905210500373, + "acc_norm": 0.378748370273794, + "acc_norm_stderr": 0.01238905210500373 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3353733170134639, + "mc1_stderr": 0.016527534039668987, + "mc2": 0.4883439191552012, + "mc2_stderr": 0.01576336696184338 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.10153482880755609, + "acc_stderr": 0.010384198041619998, + "acc_norm": 0.30814639905548996, + "acc_norm_stderr": 0.01587451515629839 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/SOLAR_C-v1-10.7B", + "model_sha": "9521d07028323f3055664fe03904caeac51b6141", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/SOLAR_C-v2-10.7B/result_2023-12-31 03:36:54.json b/DopeorNope/SOLAR_C-v2-10.7B/result_2023-12-31 03:36:54.json new file mode 100644 index 0000000000000000000000000000000000000000..33652780923e83b9aaebd2519c4cf39239ee78b2 --- /dev/null +++ b/DopeorNope/SOLAR_C-v2-10.7B/result_2023-12-31 03:36:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4377133105802048, + "acc_stderr": 0.014497573881108282, + "acc_norm": 0.4726962457337884, + "acc_norm_stderr": 0.014589589101985994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4387572196773551, + "acc_stderr": 0.004952209831856589, + "acc_norm": 0.5931089424417447, + "acc_norm_stderr": 0.004902502514738597 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5977011494252874, + "acc_stderr": 0.017535294529068945, + "acc_norm": 0.5977011494252874, + "acc_norm_stderr": 0.017535294529068945 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.02812534098397271, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.02812534098397271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 
0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03358618145732523, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03358618145732523 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.04810840148082636, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.04810840148082636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5966386554621849, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.5966386554621849, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5516129032258065, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.5516129032258065, + "acc_norm_stderr": 0.02829205683011273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.02645350805404034, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.02645350805404034 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719197, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719197 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41534391534391535, + "acc_stderr": 0.025379524910778408, + "acc_norm": 0.41534391534391535, + "acc_norm_stderr": 0.025379524910778408 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 
0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206177, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5432098765432098, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.5432098765432098, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6678899082568808, + "acc_stderr": 0.020192682985423344, + "acc_norm": 0.6678899082568808, + "acc_norm_stderr": 0.020192682985423344 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.020226106567657807, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.020226106567657807 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.02909767559946393, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.02909767559946393 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3396648044692737, + "acc_stderr": 0.015839400406212494, + "acc_norm": 0.3396648044692737, + 
"acc_norm_stderr": 0.015839400406212494 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598025, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39308996088657105, + "acc_stderr": 0.012474899613873958, + "acc_norm": 0.39308996088657105, + "acc_norm_stderr": 0.012474899613873958 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088299, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088299 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321323002, + "mc2": 0.4665292926452916, + "mc2_stderr": 0.015401236485664634 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.01714571536548667 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/SOLAR_C-v2-10.7B", + "model_sha": "cadae29eebed0be5db4f227ee918e9e988c9715b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Yi_lee-v1-6B/result_2023-12-05 15:04:18.json b/DopeorNope/Yi_lee-v1-6B/result_2023-12-05 15:04:18.json new file mode 100644 index 0000000000000000000000000000000000000000..ad01e028b508e9450f624b281361e92547feab38 --- /dev/null +++ b/DopeorNope/Yi_lee-v1-6B/result_2023-12-05 15:04:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840055, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3986257717586138, + "acc_stderr": 0.004886147907627406, + "acc_norm": 0.5336586337382991, + "acc_norm_stderr": 0.004978462690966918 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.545338441890166, + "acc_stderr": 0.0178063045850526, + "acc_norm": 0.545338441890166, + "acc_norm_stderr": 0.0178063045850526 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.032321469162244695, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.032321469162244695 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999936, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999936 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.030770900763851316, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.030770900763851316 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6440366972477064, + "acc_stderr": 0.020528559278244218, + "acc_norm": 0.6440366972477064, + "acc_norm_stderr": 0.020528559278244218 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.039701582732351734, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.039701582732351734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786685, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786685 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03246887243637649, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03246887243637649 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121603, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121603 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.03186785930004129, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.03186785930004129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.011983819806464747, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464747 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.038517163193983926, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.038517163193983926 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.4054851425091592, + "mc2_stderr": 0.014739428749798467 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.538370720188902, + "acc_stderr": 0.017139660221845553, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.017014038119297498 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Yi_lee-v1-6B", + "model_sha": "74357eee5f2ba34e74129c7955b9cf228e68d857", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Yi_lee-v1-DPO-6B/result_2023-12-06 09:37:46.json b/DopeorNope/Yi_lee-v1-DPO-6B/result_2023-12-06 09:37:46.json new file mode 100644 index 0000000000000000000000000000000000000000..dcc6df1d6b8a940386d2e6dd309d1f8d341ea7db --- /dev/null +++ b/DopeorNope/Yi_lee-v1-DPO-6B/result_2023-12-06 09:37:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840055, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3988249352718582, + "acc_stderr": 0.004886559008754986, + "acc_norm": 0.5338577972515435, + "acc_norm_stderr": 0.004978328190775524 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5440613026819924, + "acc_stderr": 0.01781040392543535, + "acc_norm": 0.5440613026819924, + "acc_norm_stderr": 0.01781040392543535 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 
0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999936, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999936 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106522, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.030770900763851316, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.030770900763851316 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + 
"acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6385321100917432, + "acc_stderr": 0.02059808200993736, + "acc_norm": 0.6385321100917432, + "acc_norm_stderr": 0.02059808200993736 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + 
}, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786685, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786685 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329387, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329387 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.011971507294982777, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.011971507294982777 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070264, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070264 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.4052668487406766, + "mc2_stderr": 0.014737077394415871 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5348288075560803, + "acc_stderr": 0.017148598015747425, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.017014038119297498 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, 
+ "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Yi_lee-v1-DPO-6B", + "model_sha": "fc6c12a061bb2f785c762593f50a2da372b1c5e8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Yi_lee-v2-DPO-6B/result_2023-12-12 07:57:04.json b/DopeorNope/Yi_lee-v2-DPO-6B/result_2023-12-12 07:57:04.json new file mode 100644 index 0000000000000000000000000000000000000000..c73ff3e0e98ca56f04e813850208c4a628b4a451 --- /dev/null +++ b/DopeorNope/Yi_lee-v2-DPO-6B/result_2023-12-12 07:57:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3506825938566553, + "acc_stderr": 0.01394463593072609, + "acc_norm": 0.40784982935153585, + "acc_norm_stderr": 0.014361097288449696 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3966341366261701, + "acc_stderr": 0.004881990487628913, + "acc_norm": 0.527185819557857, + "acc_norm_stderr": 0.004982400368939667 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5593869731800766, + "acc_stderr": 0.01775339697390848, + "acc_norm": 0.5593869731800766, + "acc_norm_stderr": 
0.01775339697390848 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.0378913442461155, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.0378913442461155 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.02837327096106942, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.02837327096106942 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555498, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555498 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.0253106392549339, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.0253106392549339 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674078, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230182, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230182 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.02437319786798305, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.02437319786798305 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756653, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.0207283684576385, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.0207283684576385 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259293, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 
0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.0399930971277747, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.0399930971277747 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.02772498944950931, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.02772498944950931 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025425, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025425 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.01450897945355398, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.01450897945355398 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851859, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.01197150729498278, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.01197150729498278 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4452362200636334, + "mc2_stderr": 0.01509557301201901 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5301062573789846, + "acc_stderr": 0.017159163590170223, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.016929480234495226 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Yi_lee-v2-DPO-6B", + "model_sha": "3c1d2d605a5c621cfa2351b4b9061519fde23730", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/ZeroCoka-7B/result_2023-10-11 12:06:32.json b/DopeorNope/ZeroCoka-7B/result_2023-10-11 12:06:32.json new file mode 100644 index 0000000000000000000000000000000000000000..60906afd8437b4b94874dfb43e59640d427cbdf0 --- /dev/null +++ b/DopeorNope/ZeroCoka-7B/result_2023-10-11 12:06:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27986348122866894, + "acc_stderr": 0.013119040897725923, + "acc_norm": 0.3455631399317406, + "acc_norm_stderr": 0.013896938461145687 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36566421031667, + "acc_stderr": 0.0048063163427093936, + "acc_norm": 0.48466440948018324, + "acc_norm_stderr": 0.004987433862274562 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + 
"acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41890166028097064, + "acc_stderr": 0.017643205052377185, + "acc_norm": 0.41890166028097064, + "acc_norm_stderr": 0.017643205052377185 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.029644006577009618, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.029644006577009618 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016339, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.03464881675016339 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.0316314580755238, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.0316314580755238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.02340092891831049, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.02340092891831049 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + 
"acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.02748054188795359, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.02748054188795359 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4829059829059829, + "acc_stderr": 0.032736940493481824, + "acc_norm": 0.4829059829059829, + "acc_norm_stderr": 0.032736940493481824 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3471698113207547, + "acc_stderr": 0.02930010170554965, + "acc_norm": 0.3471698113207547, + "acc_norm_stderr": 0.02930010170554965 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302506, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302506 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02578787422095932, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02578787422095932 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119996, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119996 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4228855721393035, + "acc_stderr": 0.03493231777421282, + "acc_norm": 0.4228855721393035, + "acc_norm_stderr": 0.03493231777421282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267437, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267437 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.0220190800122179, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.0220190800122179 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.38439306358381503, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.38439306358381503, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724145, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724145 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.027163686038271226, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.027163686038271226 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38860103626943004, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.38860103626943004, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3779816513761468, + "acc_stderr": 0.02078918706672811, + "acc_norm": 0.3779816513761468, + "acc_norm_stderr": 0.02078918706672811 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604672, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604672 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171566, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171566 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926605, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.018718067052623216, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.018718067052623216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261446, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261446 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125474, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125474 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3142857142857143, + "acc_stderr": 0.029719329422417468, + "acc_norm": 0.3142857142857143, + "acc_norm_stderr": 0.029719329422417468 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45147679324894513, + "acc_stderr": 0.0323936001739747, + "acc_norm": 0.45147679324894513, + "acc_norm_stderr": 0.0323936001739747 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.01184923429145932, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.01184923429145932 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456411, + "mc2": 0.3826229918315052, + "mc2_stderr": 0.015120737226444851 + }, + "harness|ko_commongen_v2|2": 
{ + "acc": 0.21251475796930341, + "acc_stderr": 0.014064703386174934, + "acc_norm": 0.29988193624557263, + "acc_norm_stderr": 0.015753447615429458 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/ZeroCoka-7B", + "model_sha": "3025135b08f7d052531fcd8f6a4a5a97e4e25c76", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Zero_COKE_K-13B/result_2023-10-08 06:50:15.json b/DopeorNope/Zero_COKE_K-13B/result_2023-10-08 06:50:15.json new file mode 100644 index 0000000000000000000000000000000000000000..883f9a68bf67e13282896527af6324985db9d323 --- /dev/null +++ b/DopeorNope/Zero_COKE_K-13B/result_2023-10-08 06:50:15.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.35238907849829354, + "acc_stderr": 0.01396014260059869, + "acc_norm": 0.3984641638225256, + "acc_norm_stderr": 0.014306946052735569 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3679545907189803, + "acc_stderr": 0.0048126332800782715, + "acc_norm": 0.46932881896036643, + "acc_norm_stderr": 0.004980384575535391 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.01784491809046854, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.01784491809046854 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085335, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085335 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110656, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110656 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + 
"acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.02829205683011273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524586, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524586 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, 
+ "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583639, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583639 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377563, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377563 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528777, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528777 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013317, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013317 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.018999707383162666, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.018999707383162666 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293648, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293648 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2927374301675978, + "acc_stderr": 0.015218109544410182, + "acc_norm": 0.2927374301675978, + "acc_norm_stderr": 0.015218109544410182 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280065, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280065 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533485, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533485 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.011952840809646563, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.011952840809646563 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 
0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32068543451652387, + "mc1_stderr": 0.0163391703732809, + "mc2": 0.498111749136946, + "mc2_stderr": 0.015897921630313217 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4203069657615112, + "acc_stderr": 0.016970598281177703, + "acc_norm": 0.4344746162927981, + "acc_norm_stderr": 0.01704209862082494 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Zero_COKE_K-13B", + "model_sha": "fda4838dd7feb06c1289ae143810c67a59a72961", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 
0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/mistralopithecus-v1-SFT/result_2023-11-26 08:04:14.json b/DopeorNope/mistralopithecus-v1-SFT/result_2023-11-26 08:04:14.json new file mode 100644 index 0000000000000000000000000000000000000000..cdbafcb5e800c4d0e2bad66f3477b81561ff05ad --- /dev/null +++ b/DopeorNope/mistralopithecus-v1-SFT/result_2023-11-26 08:04:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4283276450511945, + "acc_stderr": 0.014460496367599008, + "acc_norm": 0.47696245733788395, + "acc_norm_stderr": 0.014595873205358269 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40689105755825533, + "acc_stderr": 0.004902502514738604, + "acc_norm": 0.5343557060346544, + "acc_norm_stderr": 0.00497798845250264 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.03833185275213025, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.03833185275213025 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5504469987228607, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.5504469987228607, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489424, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 
0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681848, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681848 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809447, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809447 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206174, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833942, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833942 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336938, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336938 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.021311335009708582, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.021311335009708582 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521664, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521664 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.02746470844202214, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.02746470844202214 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.18659217877094972, + "acc_stderr": 0.013029631416358349, + "acc_norm": 0.18659217877094972, + "acc_norm_stderr": 0.013029631416358349 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003202, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003202 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897634, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897634 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.4537563569615343, + "mc2_stderr": 0.015481816857869497 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44391971664698937, + "acc_stderr": 0.017081884623542543, + "acc_norm": 0.47461629279811096, + "acc_norm_stderr": 0.017168187201429253 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 
1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/mistralopithecus-v1-SFT", + "model_sha": "d287f71c14d2bfbcaa053dcaa6b9b22dd5bc0f1a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/mistralopithecus-v1-dpo-7b/result_2023-11-26 08:11:29.json b/DopeorNope/mistralopithecus-v1-dpo-7b/result_2023-11-26 08:11:29.json new file mode 100644 index 0000000000000000000000000000000000000000..b9b490954d6695aff4afab90ac2f9d1287a59229 --- /dev/null +++ b/DopeorNope/mistralopithecus-v1-dpo-7b/result_2023-11-26 08:11:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4872013651877133, + "acc_stderr": 0.014606603181012546, + "acc_norm": 0.5273037542662116, + "acc_norm_stderr": 0.014589589101985994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4403505277833101, + "acc_stderr": 0.004954146286513353, + "acc_norm": 0.55646285600478, + "acc_norm_stderr": 0.004957863944093124 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4929757343550447, + "acc_stderr": 0.01787819900343221, + "acc_norm": 0.4929757343550447, + "acc_norm_stderr": 0.01787819900343221 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734025, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734025 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911522, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911522 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 0.02525303255499768, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.02525303255499768 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 
0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.027563010971606676, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.027563010971606676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.02143642095552942, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.02143642095552942 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.01965992249362334, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.01965992249362334 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534795, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534795 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20335195530726258, + "acc_stderr": 0.013461351487507506, + "acc_norm": 0.20335195530726258, + "acc_norm_stderr": 0.013461351487507506 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 
0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682487, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682487 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3109517601043025, + "acc_stderr": 0.0118222529177992, + "acc_norm": 0.3109517601043025, + "acc_norm_stderr": 0.0118222529177992 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361016, + "mc2": 0.4499453306291458, + "mc2_stderr": 0.016369397422184195 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3187721369539551, + "acc_stderr": 0.016021427055309578, + "acc_norm": 0.3270365997638725, + "acc_norm_stderr": 0.016129047485457022 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, 
+ "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/mistralopithecus-v1-dpo-7b", + "model_sha": "ec7557bb2a4fbbb775d057f98f98ae6b4430c8d5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/mistralopithecus-v1-dpo/result_2023-11-26 08:04:46.json b/DopeorNope/mistralopithecus-v1-dpo/result_2023-11-26 08:04:46.json new file mode 100644 index 0000000000000000000000000000000000000000..7c3ee66112659101e7efd30db17a59614ef0962c --- /dev/null +++ b/DopeorNope/mistralopithecus-v1-dpo/result_2023-11-26 08:04:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4872013651877133, + "acc_stderr": 0.014606603181012546, + "acc_norm": 0.5273037542662116, + "acc_norm_stderr": 0.014589589101985994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4404501095399323, + "acc_stderr": 0.004954265595373461, + "acc_norm": 0.5565624377614021, + "acc_norm_stderr": 0.004957750897152936 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4929757343550447, + "acc_stderr": 0.01787819900343221, + "acc_norm": 0.4929757343550447, + "acc_norm_stderr": 0.01787819900343221 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 
0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734025, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734025 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911522, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911522 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 
0.02525303255499768, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.02525303255499768 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.027563010971606676, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.027563010971606676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.02143642095552942, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.02143642095552942 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.01965992249362334, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.01965992249362334 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534795, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534795 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 
0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20335195530726258, + "acc_stderr": 0.013461351487507506, + "acc_norm": 0.20335195530726258, + "acc_norm_stderr": 0.013461351487507506 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682487, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682487 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3109517601043025, + "acc_stderr": 0.0118222529177992, + "acc_norm": 0.3109517601043025, + "acc_norm_stderr": 0.0118222529177992 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361016, + "mc2": 0.44995826505029746, + "mc2_stderr": 0.016369720959182137 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3187721369539551, + "acc_stderr": 0.016021427055309578, + "acc_norm": 0.3270365997638725, + "acc_norm_stderr": 0.016129047485457022 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/mistralopithecus-v1-dpo", + "model_sha": "ec7557bb2a4fbbb775d057f98f98ae6b4430c8d5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/mistralopithecus-v2-dpo-7b/result_2023-11-26 09:15:23.json b/DopeorNope/mistralopithecus-v2-dpo-7b/result_2023-11-26 09:15:23.json new file mode 100644 index 0000000000000000000000000000000000000000..5449cfa7dc3410d7847b6fee9bca5c91149a1d68 --- /dev/null +++ b/DopeorNope/mistralopithecus-v2-dpo-7b/result_2023-11-26 09:15:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.17235494880546076, + "acc_stderr": 0.011037113093461295, + "acc_norm": 0.2525597269624573, + "acc_norm_stderr": 0.012696728980207706 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25423222465644296, + "acc_stderr": 0.00434538861452003, + "acc_norm": 0.24576777534355707, + "acc_norm_stderr": 0.0042966158627866305 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 
0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 
0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539265, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539265 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715014, + "mc2": 0.496495319717773, + "mc2_stderr": 0.016950807749782918 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08382526564344746, + "acc_stderr": 0.009527773913592165, + "acc_norm": 0.38961038961038963, + "acc_norm_stderr": 0.016766161671893494 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, 
+ "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/mistralopithecus-v2-dpo-7b", + "model_sha": "4481f2a07c5b4c31f650c94b558bec12ff8cddff", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/EleutherAI/polyglot-ko-1.3b/result_2023-09-24 15:21:38.json b/EleutherAI/polyglot-ko-1.3b/result_2023-09-24 15:21:38.json new file mode 100644 index 0000000000000000000000000000000000000000..1b7095328882e4fa2330ae228ca1c8942721fe6c --- /dev/null +++ b/EleutherAI/polyglot-ko-1.3b/result_2023-09-24 15:21:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2235494880546075, + "acc_stderr": 0.012174896631202605, + "acc_norm": 0.2815699658703072, + "acc_norm_stderr": 0.013143376735009015 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3345947022505477, + "acc_stderr": 0.004708842600177431, + "acc_norm": 0.4135630352519418, + "acc_norm_stderr": 0.0049146550633294974 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691585, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.03424042924691585 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26947637292464877, + "acc_stderr": 0.015866243073215065, + "acc_norm": 0.26947637292464877, + "acc_norm_stderr": 0.015866243073215065 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + 
"acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039783, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039783 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.033293941190735296, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.033293941190735296 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2379421221864952, + "acc_stderr": 0.024185150647818707, + "acc_norm": 0.2379421221864952, + "acc_norm_stderr": 0.024185150647818707 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.030216831011508766, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.030216831011508766 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.030176808288974337, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.030176808288974337 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.02102067268082791, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.02102067268082791 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293752, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.02606936229533513, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02606936229533513 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.027601921381417607, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.027601921381417607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.026880647889051968, + "acc_norm": 0.25660377358490566, + "acc_norm_stderr": 0.026880647889051968 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 
0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587194, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587194 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.03096590312357303, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357303 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.024477222856135114, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.024477222856135114 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.037752050135836386, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.037752050135836386 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790606, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.024288619466046102, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.024288619466046102 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 
0.039849796533028704, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.039849796533028704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.033550453048829226, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.033550453048829226 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.01728276069516743, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.01728276069516743 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.03106721126287249, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.03106721126287249 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2109704641350211, + "acc_stderr": 0.02655837250266192, + "acc_norm": 0.2109704641350211, + "acc_norm_stderr": 0.02655837250266192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23468057366362452, + "acc_stderr": 0.010824026872449344, + "acc_norm": 0.23468057366362452, + "acc_norm_stderr": 0.010824026872449344 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.03256866661681102, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.03256866661681102 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707682, + "mc2": 0.4116568832959107, + "mc2_stderr": 0.015044504977529799 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605975, + "acc_norm": 0.3400236127508855, + "acc_norm_stderr": 0.016286717220737674 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "EleutherAI/polyglot-ko-1.3b", + "model_sha": "557e162cf6e944fdbae05bab2e45d066a125eacb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/EleutherAI/polyglot-ko-12.8b/result_2023-09-26 09:55:07.json b/EleutherAI/polyglot-ko-12.8b/result_2023-09-26 09:55:07.json new file mode 100644 index 0000000000000000000000000000000000000000..d1a30257ad7684dfa258c0524fe16e2e6f6dec7c --- /dev/null +++ b/EleutherAI/polyglot-ko-12.8b/result_2023-09-26 09:55:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537365, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785562 + }, + "harness|ko_hellaswag|10": { + "acc": 0.385381398127863, + "acc_stderr": 0.004856906473719383, + "acc_norm": 0.5027882891854212, + "acc_norm_stderr": 0.004989703824167094 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393161, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393161 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + 
"acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.015671006009339572, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.015671006009339572 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039787, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039787 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.02631185807185416, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.02631185807185416 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03053289223393203, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03053289223393203 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438014, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438014 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868963, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868963 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026928, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026928 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.0309037969521145, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.0309037969521145 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239963, + "acc_norm": 0.25161290322580643, + 
"acc_norm_stderr": 0.024685979286239963 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.027601921381417604, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.027601921381417604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.026199808807561932, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.026199808807561932 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935554, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935554 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776578, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776578 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.036539469694421, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.036539469694421 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.022698657167855716, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.022698657167855716 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886338, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886338 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26422018348623855, + "acc_stderr": 0.0189041641715102, + "acc_norm": 0.26422018348623855, + "acc_norm_stderr": 0.0189041641715102 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 
0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.02505850331695815, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.02505850331695815 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.20098039215686275, + "acc_stderr": 0.016211938889655574, + "acc_norm": 0.20098039215686275, + "acc_norm_stderr": 0.016211938889655574 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.02498710636564298, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.02498710636564298 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.026882144922307748, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.026882144922307748 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.32489451476793246, + "acc_stderr": 0.030486039389105303, + "acc_norm": 0.32489451476793246, + "acc_norm_stderr": 0.030486039389105303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25684485006518903, + "acc_stderr": 0.011158455853098857, + "acc_norm": 0.25684485006518903, + "acc_norm_stderr": 0.011158455853098857 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.032876667586034886, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.032876667586034886 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731613, + "mc2": 0.390667104295536, + "mc2_stderr": 0.014736649975849761 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30932703659976385, + "acc_stderr": 0.01589132050552089, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.0168363772928493 
+ } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "EleutherAI/polyglot-ko-12.8b", + "model_sha": "09dfc839067bf44e7f52976eca8adbc17f04e1b0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/EleutherAI/polyglot-ko-3.8b/result_2023-09-26 09:54:58.json b/EleutherAI/polyglot-ko-3.8b/result_2023-09-26 09:54:58.json new file mode 100644 index 0000000000000000000000000000000000000000..abb21b3618c51a6aab88db50984e75adfdd941cc --- /dev/null +++ b/EleutherAI/polyglot-ko-3.8b/result_2023-09-26 09:54:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2525597269624573, + "acc_stderr": 0.01269672898020771, + "acc_norm": 0.3046075085324232, + "acc_norm_stderr": 
0.013449522109932494 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3511252738498307, + "acc_stderr": 0.004763465139038552, + "acc_norm": 0.4420434176458873, + "acc_norm_stderr": 0.004956147046108961 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03188578017686398, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03188578017686398 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23627075351213284, + "acc_stderr": 0.015190473717037497, + "acc_norm": 0.23627075351213284, + "acc_norm_stderr": 0.015190473717037497 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.026148818018424502, + "acc_norm": 0.2, + "acc_norm_stderr": 0.026148818018424502 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233135, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233135 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24437299035369775, + "acc_stderr": 0.024406162094668886, + "acc_norm": 0.24437299035369775, + "acc_norm_stderr": 0.024406162094668886 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21524663677130046, + "acc_stderr": 0.027584066602208263, + "acc_norm": 0.21524663677130046, + "acc_norm_stderr": 0.027584066602208263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.19083969465648856, + "acc_stderr": 0.034465133507525954, + "acc_norm": 0.19083969465648856, + "acc_norm_stderr": 0.034465133507525954 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.024283140529467295, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.024283140529467295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + 
"acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.03194740072265541, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.03194740072265541 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671742, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671742 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3283018867924528, + "acc_stderr": 0.02890159361241178, + "acc_norm": 0.3283018867924528, + "acc_norm_stderr": 0.02890159361241178 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721376, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721376 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2398843930635838, + "acc_stderr": 0.022989592543123567, + "acc_norm": 0.2398843930635838, + "acc_norm_stderr": 0.022989592543123567 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.024659685185967277, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.024659685185967277 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3626943005181347, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.3626943005181347, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + 
"acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25688073394495414, + "acc_stderr": 0.018732492928342448, + "acc_norm": 0.25688073394495414, + "acc_norm_stderr": 0.018732492928342448 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.024954184324879905, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.024954184324879905 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1652892561983471, + "acc_stderr": 0.03390780612972776, + "acc_norm": 0.1652892561983471, + "acc_norm_stderr": 0.03390780612972776 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537537, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290396, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.15178571428571427, + "acc_stderr": 0.03405702838185692, + "acc_norm": 0.15178571428571427, + "acc_norm_stderr": 0.03405702838185692 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364546, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364546 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.23628691983122363, + "acc_stderr": 0.02765215314415926, + "acc_norm": 0.23628691983122363, + "acc_norm_stderr": 0.02765215314415926 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113912, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113912 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.03096451792692341, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.03096451792692341 + }, + "harness|ko_mmlu_high_school_european_history|5": { + 
"acc": 0.296969696969697, + "acc_stderr": 0.03567969772268046, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268046 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.40454723614569765, + "mc2_stderr": 0.014981033793701278 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.015473271583988433, + "acc_norm": 0.3707201889020071, + "acc_norm_stderr": 0.016605801289212605 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "EleutherAI/polyglot-ko-3.8b", + "model_sha": "3c696a71c16b4a4622b7cabf6c5da4ba5a73b548", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/EleutherAI/polyglot-ko-5.8b/result_2023-09-24 15:21:38.json b/EleutherAI/polyglot-ko-5.8b/result_2023-09-24 15:21:38.json new file mode 100644 index 0000000000000000000000000000000000000000..7b3baeeb1120cc18456c3c4dc216ad5740f8b04a --- /dev/null +++ b/EleutherAI/polyglot-ko-5.8b/result_2023-09-24 15:21:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2687713310580205, + "acc_stderr": 0.012955065963710675, + "acc_norm": 0.32764505119453924, + "acc_norm_stderr": 0.013715847940719339 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3690499900418243, + "acc_stderr": 0.004815613144385398, + "acc_norm": 0.4814777932682733, + "acc_norm_stderr": 0.004986356526063965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041693, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041693 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20178799489144317, + "acc_stderr": 0.014351702181636861, + "acc_norm": 0.20178799489144317, + "acc_norm_stderr": 0.014351702181636861 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.17446808510638298, + "acc_stderr": 0.02480944233550398, + "acc_norm": 0.17446808510638298, + "acc_norm_stderr": 0.02480944233550398 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.030709824050565264, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.030709824050565264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11659192825112108, + "acc_stderr": 0.021539639816244467, + "acc_norm": 0.11659192825112108, + "acc_norm_stderr": 0.021539639816244467 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.048108401480826346, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.048108401480826346 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493864, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493864 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + 
"acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3504587155963303, + "acc_stderr": 0.02045607759982446, + "acc_norm": 0.3504587155963303, + "acc_norm_stderr": 0.02045607759982446 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279053, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279053 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953776, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953776 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.03562367850095391, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.03562367850095391 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536934, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536934 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789834, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789834 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 
0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178475, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693257, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693257 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.01522589934082683, + "mc2": 0.3923103125697379, + "mc2_stderr": 0.014648106435610566 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2857142857142857, + "acc_stderr": 0.01553162078698674, + "acc_norm": 0.3565525383707202, + "acc_norm_stderr": 0.016467706981527448 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "EleutherAI/polyglot-ko-5.8b", + "model_sha": "581a4c3eebfac23536b3c9676bcfb05c6a97baa2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/F24/F23-llama2-13B-x1/result_2023-11-24 10:19:15.json b/F24/F23-llama2-13B-x1/result_2023-11-24 10:19:15.json new file mode 100644 index 0000000000000000000000000000000000000000..bb70229e31cae1233fe568c3c4b6244335b3465a --- /dev/null +++ b/F24/F23-llama2-13B-x1/result_2023-11-24 10:19:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.013928933461382504, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398326 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4160525791674965, + "acc_stderr": 0.004918951019183889, + "acc_norm": 0.5650268870742879, + "acc_norm_stderr": 0.004947402907996247 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.01786933015400371, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.01786933015400371 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788683, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788683 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197608, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197608 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.03355746535223264, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + 
"acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.0316314580755238, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.0316314580755238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.382051282051282, + "acc_stderr": 0.024635549163908223, + "acc_norm": 0.382051282051282, + "acc_norm_stderr": 0.024635549163908223 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.03332769068410789, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.03332769068410789 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.02797605491534736, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.02797605491534736 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776285, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776285 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959316, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959316 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261114, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261114 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": 
{ + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206167, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206167 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + "acc_stderr": 0.021432956203453306, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.021432956203453306 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.02795604616542452, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.02795604616542452 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.038035102483515854 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355442, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257017, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257017 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + 
"acc_stderr": 0.027678468642144696, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144696 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 0.03055531675557364, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.03055531675557364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29726205997392435, + "acc_stderr": 0.011673346173086045, + "acc_norm": 0.29726205997392435, + "acc_norm_stderr": 0.011673346173086045 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.43479566764760613, + "mc2_stderr": 0.014958184938646393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4604486422668241, + "acc_stderr": 0.017136487626049846, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "F24/F23-llama2-13B-x1", + "model_sha": "90b8a06c768a8981c6368bcbd0294a9e0f92aa79", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/F24/llama-2-koen-13b-slimOrca/result_2023-12-03 09:10:05.json b/F24/llama-2-koen-13b-slimOrca/result_2023-12-03 09:10:05.json new file mode 100644 index 0000000000000000000000000000000000000000..a5187609220be56ef46a433614e4186f667daaa3 --- /dev/null +++ b/F24/llama-2-koen-13b-slimOrca/result_2023-12-03 09:10:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4061433447098976, + "acc_stderr": 0.014351656690097862, + "acc_norm": 0.46245733788395904, + "acc_norm_stderr": 0.014570144495075581 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4186417048396734, + "acc_stderr": 0.0049232818418285165, + "acc_norm": 0.5636327424815774, + "acc_norm_stderr": 0.004949207947265917 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5466155810983397, + "acc_stderr": 0.01780208713585031, + "acc_norm": 0.5466155810983397, + "acc_norm_stderr": 0.01780208713585031 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 
0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.0324498084999003, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.0324498084999003 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036544, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036544 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349476, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349476 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114982, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114982 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101803, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101803 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + 
"acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008585, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.021004201260420078, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420078 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2670391061452514, + "acc_stderr": 0.014796502622562548, + "acc_norm": 0.2670391061452514, + "acc_norm_stderr": 
0.014796502622562548 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280055, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280055 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.318122555410691, + "acc_stderr": 0.011895407281104097, + "acc_norm": 0.318122555410691, + "acc_norm_stderr": 0.011895407281104097 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.44254172455320107, + "mc2_stderr": 0.015186819172805456 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44510035419126326, + "acc_stderr": 0.017086417431005474, + "acc_norm": 0.4805194805194805, + "acc_norm_stderr": 0.01717730199234255 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "F24/llama-2-koen-13b-slimOrca", + "model_sha": "74138e08e67f4d1b710286b70399e75a4c03a511", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/F24/llama-2-koen-orca-mini-platypus2-math-13b/result_2023-12-03 09:38:38.json b/F24/llama-2-koen-orca-mini-platypus2-math-13b/result_2023-12-03 09:38:38.json new file mode 100644 index 0000000000000000000000000000000000000000..7e8255f1a21ca871cc7b8c78b69d6e574117ade6 --- /dev/null +++ b/F24/llama-2-koen-orca-mini-platypus2-math-13b/result_2023-12-03 09:38:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.22696245733788395, + "acc_stderr": 0.01224049153613286, + "acc_norm": 0.22696245733788395, + "acc_norm_stderr": 0.01224049153613286 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 
0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 
0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 
+ }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 1.0, + "mc1_stderr": 0.0, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252247, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.014846044968252247 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "F24/llama-2-koen-orca-mini-platypus2-math-13b", + "model_sha": "b5ee3b5b459be0a3fd99d5050ed0d38653404690", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/FINDA-FIT/llama-2-ko-plain/result_2023-09-30 03:54:00.json b/FINDA-FIT/llama-2-ko-plain/result_2023-09-30 03:54:00.json new file mode 100644 index 0000000000000000000000000000000000000000..90045181537868b66022a7e29b971dde97f688e4 --- /dev/null +++ b/FINDA-FIT/llama-2-ko-plain/result_2023-09-30 03:54:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19539249146757678, + "acc_stderr": 0.011586907189952911, + "acc_norm": 0.2636518771331058, + "acc_norm_stderr": 0.012875929151297047 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2660824536944832, + "acc_stderr": 0.004410047530835032, + "acc_norm": 0.2788289185421231, + "acc_norm_stderr": 0.004475067344626752 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.04541609446503949, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.04541609446503949 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2720306513409962, + "acc_stderr": 0.015913367447500524, + "acc_norm": 0.2720306513409962, + "acc_norm_stderr": 0.015913367447500524 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.034554737023254366, + "acc_norm": 0.2, + "acc_norm_stderr": 0.034554737023254366 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.028957342788342347, + "acc_norm": 0.2680851063829787, 
+ "acc_norm_stderr": 0.028957342788342347 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.034106466140718564, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.034106466140718564 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632945, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632945 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2914798206278027, + "acc_stderr": 0.030500283176545902, + "acc_norm": 0.2914798206278027, + "acc_norm_stderr": 0.030500283176545902 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124505, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124505 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.036951833116502325, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.036951833116502325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931666, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931666 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.023454674889404285, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.023454674889404285 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553873, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553873 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.02619980880756193, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.02619980880756193 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871937, + "acc_norm": 0.2962962962962963, + 
"acc_norm_stderr": 0.027840811495871937 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.20398009950248755, + "acc_stderr": 0.02849317624532609, + "acc_norm": 0.20398009950248755, + "acc_norm_stderr": 0.02849317624532609 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173044, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173044 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.022894082489925992, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.022894082489925992 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102148, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102148 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.025457756696667874, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.025457756696667874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148594, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148594 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.033851779760448106, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.033851779760448106 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.01431099954796145, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.01431099954796145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142804, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142804 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.02747974455080851, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.02747974455080851 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23663624511082137, + "acc_stderr": 0.010855137351572742, + "acc_norm": 0.23663624511082137, + "acc_norm_stderr": 0.010855137351572742 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591362, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842892, + "mc2": 0.5367542106571858, + "mc2_stderr": 0.01635449255335969 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.1487603305785124, + "acc_stderr": 0.012234446131035063, + "acc_norm": 0.3860684769775679, + "acc_norm_stderr": 0.01673813076032174 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 
1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "FINDA-FIT/llama-2-ko-plain", + "model_sha": "091fe3550bfa49baaebda838c10825484580f89d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/FINDA-FIT/llama-ko-7b/result_2023-09-29 16:26:20.json b/FINDA-FIT/llama-ko-7b/result_2023-09-29 16:26:20.json new file mode 100644 index 0000000000000000000000000000000000000000..b7551097ac9913a99b9f740e98885dc86abb33b2 --- /dev/null +++ b/FINDA-FIT/llama-ko-7b/result_2023-09-29 16:26:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19795221843003413, + "acc_stderr": 0.011643990971573401, + "acc_norm": 0.26535836177474403, + "acc_norm_stderr": 0.012902554762313962 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2633937462656841, + "acc_stderr": 0.004395739495688583, + "acc_norm": 0.27823142800239, + "acc_norm_stderr": 0.004472121485161932 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.03989139859531771, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.03989139859531771 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777552, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777552 + 
}, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073463, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.03633384414073463 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.028185441301234102, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.028185441301234102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26688102893890675, + "acc_stderr": 0.025122637608816657, + "acc_norm": 0.26688102893890675, + "acc_norm_stderr": 0.025122637608816657 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2556053811659193, + "acc_stderr": 0.029275891003969923, + "acc_norm": 0.2556053811659193, + "acc_norm_stderr": 0.029275891003969923 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969195, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969195 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931666, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931666 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30512820512820515, + "acc_stderr": 0.023346335293325887, + "acc_norm": 0.30512820512820515, + "acc_norm_stderr": 0.023346335293325887 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826371, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826371 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.026377567028645858, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.026377567028645858 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.22641509433962265, + "acc_stderr": 0.025757559893106727, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106727 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721376, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721376 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.022894082489925992, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.022894082489925992 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.018125669180861514, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.018125669180861514 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333337, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333337 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + 
"acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083497, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083497 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.034260594244031654, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.034260594244031654 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537537, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290396, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.033851779760448106, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.033851779760448106 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.01440029642922561, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.01440029642922561 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.03018753206032938, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.03018753206032938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.02747974455080851, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.02747974455080851 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2301173402868318, + "acc_stderr": 0.010750183177375553, + "acc_norm": 0.2301173402868318, + "acc_norm_stderr": 0.010750183177375553 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.03256866661681102, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.03256866661681102 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219381, + "mc2": 0.538620436654127, + "mc2_stderr": 0.016366108934105512 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.15230224321133412, + "acc_stderr": 0.01235345636132145, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.016637917789798735 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "FINDA-FIT/llama-ko-7b", + "model_sha": "c1f0b9f20d38c9494e1607bd30ce43da570d9d52", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/FINDA-FIT/llama-m/result_2023-09-30 08:24:55.json b/FINDA-FIT/llama-m/result_2023-09-30 08:24:55.json new file mode 100644 index 0000000000000000000000000000000000000000..5f04ec60250cdeebfb3bb1dfa186a44c9cc3b38c --- /dev/null +++ b/FINDA-FIT/llama-m/result_2023-09-30 08:24:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19539249146757678, + "acc_stderr": 0.01158690718995291, + "acc_norm": 0.2619453924914676, + "acc_norm_stderr": 0.012849054826858112 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2642899820752838, + "acc_stderr": 0.00440053218855021, + "acc_norm": 0.27763393746265685, + "acc_norm_stderr": 0.00446916572860033 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 
0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161549, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161549 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2681992337164751, + "acc_stderr": 0.015842430835269438, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.015842430835269438 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678316, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678316 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.028504856470514203, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.028504856470514203 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.0357160923005348, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.0357160923005348 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2797427652733119, + "acc_stderr": 0.02549425935069491, + "acc_norm": 0.2797427652733119, + "acc_norm_stderr": 0.02549425935069491 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.23318385650224216, + "acc_stderr": 0.028380391147094716, + "acc_norm": 0.23318385650224216, + "acc_norm_stderr": 0.028380391147094716 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969195, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969195 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932032, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932032 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3282051282051282, + "acc_stderr": 0.02380763319865727, + "acc_norm": 0.3282051282051282, + "acc_norm_stderr": 0.02380763319865727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826371, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826371 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 
0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553873, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553873 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.025907897122408173, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.025907897122408173 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724138, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724138 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 0.0294752502360172, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.0294752502360172 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173043, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173043 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.022894082489925992, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.022894082489925992 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.032396370467357015, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.032396370467357015 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24036697247706423, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.24036697247706423, + "acc_norm_stderr": 
0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2238562091503268, + "acc_stderr": 0.016863008585416617, + "acc_norm": 0.2238562091503268, + "acc_norm_stderr": 0.016863008585416617 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290396, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553983, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553983 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.02812342933514279, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.02812342933514279 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22362869198312235, + "acc_stderr": 0.027123298205229972, + "acc_norm": 0.22362869198312235, + "acc_norm_stderr": 0.027123298205229972 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2333767926988266, + "acc_stderr": 0.010803108481179088, + "acc_norm": 0.2333767926988266, + "acc_norm_stderr": 0.010803108481179088 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603489, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603489 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219385, + "mc2": 0.5382255654218452, + "mc2_stderr": 0.01636582464762524 + }, + "harness|ko_commongen_v2|2": { + 
"acc": 0.1487603305785124, + "acc_stderr": 0.012234446131035059, + "acc_norm": 0.3789846517119244, + "acc_norm_stderr": 0.016679260684229286 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "FINDA-FIT/llama-m", + "model_sha": "7c06c7acb6bd18e1cf52846483e430def93686f2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/FINDA-FIT/llama-p/result_2023-09-30 17:05:38.json b/FINDA-FIT/llama-p/result_2023-09-30 17:05:38.json new file mode 100644 index 0000000000000000000000000000000000000000..41a758a2ce06da144ca0d3d59eb72ea527ebe4ea --- /dev/null +++ b/FINDA-FIT/llama-p/result_2023-09-30 17:05:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3395904436860068, + 
"acc_stderr": 0.013839039762820169, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.014291228393536588 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38856801433977295, + "acc_stderr": 0.004864286176731823, + "acc_norm": 0.5073690499900418, + "acc_norm_stderr": 0.004989239462835233 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259263996, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.037439798259263996 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3946360153256705, + "acc_stderr": 0.017478464305911545, + "acc_norm": 0.3946360153256705, + "acc_norm_stderr": 0.017478464305911545 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102956, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102956 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4115755627009646, + "acc_stderr": 0.027950481494401266, + "acc_norm": 0.4115755627009646, + "acc_norm_stderr": 0.027950481494401266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138621, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138621 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.13725490196078433, + "acc_stderr": 0.03424084669891523, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.03424084669891523 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.030684737115135367, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.030684737115135367 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.258974358974359, + "acc_stderr": 0.02221110681006167, + "acc_norm": 0.258974358974359, + "acc_norm_stderr": 0.02221110681006167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567104, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.026662010578567104 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5512820512820513, + "acc_stderr": 0.032583346493868806, + "acc_norm": 0.5512820512820513, + "acc_norm_stderr": 0.032583346493868806 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35094339622641507, + "acc_stderr": 0.029373646253234686, + "acc_norm": 0.35094339622641507, + "acc_norm_stderr": 0.029373646253234686 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.046737523336702384, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.046737523336702384 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.032578473844367746, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.032578473844367746 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4427860696517413, + "acc_stderr": 0.03512310964123936, + "acc_norm": 0.4427860696517413, + "acc_norm_stderr": 0.03512310964123936 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3901734104046243, + "acc_stderr": 0.026261677607806642, + "acc_norm": 0.3901734104046243, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.02716368603827123, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.02716368603827123 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32642487046632124, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.32642487046632124, + "acc_norm_stderr": 0.033840286211432945 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3743119266055046, + "acc_stderr": 0.02074895940898831, + "acc_norm": 0.3743119266055046, + "acc_norm_stderr": 0.02074895940898831 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790604, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790604 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.045077322787750874, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.045077322787750874 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.019524316744866346, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.019524316744866346 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.030388051301678116, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.030388051301678116 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.02833295951403124, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.02833295951403124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2985658409387223, + "acc_stderr": 0.011688060141794208, + "acc_norm": 0.2985658409387223, + "acc_norm_stderr": 0.011688060141794208 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + 
}, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.0381549430868893 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.38092210327853554, + "mc2_stderr": 0.014881931344043989 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24675324675324675, + "acc_stderr": 0.014822275820015236, + "acc_norm": 0.31286894923258557, + "acc_norm_stderr": 0.015941010118302658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "FINDA-FIT/llama-p", + "model_sha": "e54c345988c60cdafe797a2f15e916801ee4ab7b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at 
end of file diff --git a/FINDA-FIT/llama-r/result_2023-09-30 09:12:26.json b/FINDA-FIT/llama-r/result_2023-09-30 09:12:26.json new file mode 100644 index 0000000000000000000000000000000000000000..32ef0163e5f33b0723deeea151c8dc11afdb0874 --- /dev/null +++ b/FINDA-FIT/llama-r/result_2023-09-30 09:12:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20136518771331058, + "acc_stderr": 0.011718927477444262, + "acc_norm": 0.2636518771331058, + "acc_norm_stderr": 0.01287592915129705 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2665803624775941, + "acc_stderr": 0.004412674170976469, + "acc_norm": 0.27922724556861184, + "acc_norm_stderr": 0.004477025762200596 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2046783625730994, + "acc_stderr": 0.03094445977853321, + "acc_norm": 0.2046783625730994, + "acc_norm_stderr": 0.03094445977853321 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.04541609446503949, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.04541609446503949 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2656449553001277, + "acc_stderr": 0.01579430248788873, + "acc_norm": 0.2656449553001277, + "acc_norm_stderr": 0.01579430248788873 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678316, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678316 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.028185441301234113, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.028185441301234113 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.034605799075530255, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.034605799075530255 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.02592237178881877, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.02592237178881877 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.23766816143497757, + "acc_stderr": 0.028568079464714267, + "acc_norm": 0.23766816143497757, + "acc_norm_stderr": 0.028568079464714267 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932032, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932032 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727771, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727771 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.03068473711513536, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.03068473711513536 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.3153846153846154, + "acc_stderr": 0.02355964698318994, + "acc_norm": 0.3153846153846154, + "acc_norm_stderr": 0.02355964698318994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.026377567028645858, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.026377567028645858 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724138, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724138 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712177, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712177 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.19402985074626866, + "acc_stderr": 0.027962677604768893, + "acc_norm": 0.19402985074626866, + "acc_norm_stderr": 0.027962677604768893 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566018, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566018 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071134, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071134 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + 
"acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.0325771407770966, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.0325771407770966 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23119266055045873, + "acc_stderr": 0.018075750241633163, + "acc_norm": 0.23119266055045873, + "acc_norm_stderr": 0.018075750241633163 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333337, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333337 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.024954184324879912, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.024954184324879912 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.036117805602848975, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.036117805602848975 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22712418300653595, + "acc_stderr": 0.016949853279212373, + "acc_norm": 0.22712418300653595, + "acc_norm_stderr": 0.016949853279212373 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953777, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953777 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.01446589382985993, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.01446589382985993 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22784810126582278, + "acc_stderr": 0.02730348459906942, + 
"acc_norm": 0.22784810126582278, + "acc_norm_stderr": 0.02730348459906942 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.010865436690780272, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.010865436690780272 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.01561651849721938, + "mc2": 0.5406294687690661, + "mc2_stderr": 0.016334114258114155 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.14994096812278632, + "acc_stderr": 0.012274378656217328, + "acc_norm": 0.3872491145218418, + "acc_norm_stderr": 0.016747577991642792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 
1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "FINDA-FIT/llama-r", + "model_sha": "6bdde9a227da60c2db803024d5b2e3a53a41cf0b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-dpo-v3/result_2023-12-19 14:26:44.json b/GAI-LLM/Yi-Ko-6B-dpo-v3/result_2023-12-19 14:26:44.json new file mode 100644 index 0000000000000000000000000000000000000000..8ee76ff7ee2bd1d93a44b06aa94a4551f72ac4d8 --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-dpo-v3/result_2023-12-19 14:26:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3677474402730375, + "acc_stderr": 0.014090995618168485, + "acc_norm": 0.4351535836177474, + "acc_norm_stderr": 0.014487986197186052 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3979286994622585, + "acc_stderr": 0.004884702412456099, + "acc_norm": 0.5377414857598088, + "acc_norm_stderr": 0.004975546018950673 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5466155810983397, + "acc_stderr": 0.0178020871358503, + "acc_norm": 0.5466155810983397, + "acc_norm_stderr": 0.0178020871358503 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933914, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933914 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5193548387096775, + "acc_stderr": 0.02842268740431211, + "acc_norm": 0.5193548387096775, + "acc_norm_stderr": 0.02842268740431211 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.0275285992103405, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.0275285992103405 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255168, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255168 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523867, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523867 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + 
"acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5963302752293578, + "acc_stderr": 0.02103570485657497, + "acc_norm": 0.5963302752293578, + "acc_norm_stderr": 0.02103570485657497 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.02862930519400355, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.02862930519400355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.019706875804085627, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.019706875804085627 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.030388051301678116, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.030388051301678116 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28938547486033517, + "acc_stderr": 0.015166544550490305, + "acc_norm": 0.28938547486033517, + "acc_norm_stderr": 0.015166544550490305 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714854, + "acc_norm": 
0.4264705882352941, + "acc_norm_stderr": 0.030042615832714854 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.011971507294982775, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.011971507294982775 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.034956245220154766, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.034956245220154766 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.45135776407373196, + "mc2_stderr": 0.015142660341000317 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5655253837072018, + "acc_stderr": 0.017042098620824925, + "acc_norm": 0.6257378984651711, + "acc_norm_stderr": 0.016637917789798742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-dpo-v3", + "model_sha": "9abe61ce6ef0fcdc77e2b2d87bf85b9c83dda19d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-dpo-v4/result_2023-12-22 00:26:06.json b/GAI-LLM/Yi-Ko-6B-dpo-v4/result_2023-12-22 00:26:06.json new file mode 100644 index 0000000000000000000000000000000000000000..c41d72824b9fe717e2a6bac44a65e78bb497bb83 --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-dpo-v4/result_2023-12-22 00:26:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.01415063143511173, + "acc_norm": 0.4351535836177474, + "acc_norm_stderr": 0.014487986197186052 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4004182433778132, + "acc_stderr": 0.0048898174897396935, + "acc_norm": 0.5360485958972316, + "acc_norm_stderr": 0.004976796060456437 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5568326947637292, + "acc_stderr": 0.01776408503534841, + "acc_norm": 0.5568326947637292, + "acc_norm_stderr": 0.01776408503534841 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079023, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431183, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431183 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524582, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524582 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268815, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268815 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 
0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.616580310880829, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.616580310880829, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6201834862385321, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.6201834862385321, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.02862930519400355, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.02862930519400355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.01979448890002411, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.01979448890002411 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611324, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611324 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802747, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802747 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2905027932960894, + "acc_stderr": 0.01518384430720616, + "acc_norm": 0.2905027932960894, + "acc_norm_stderr": 0.01518384430720616 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0301619119307671, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0301619119307671 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301843, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301843 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.011901895635786084, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.011901895635786084 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.4453706082481084, + "mc2_stderr": 0.015180178951498797 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5926800472255017, + "acc_stderr": 0.01689245669519127, + "acc_norm": 0.6304604486422668, + "acc_norm_stderr": 0.016594883405685438 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-dpo-v4", + "model_sha": "88d9402eaf923e49b72f09ecefca91705e3e3d01", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-mixed-v11/result_2023-12-21 05:30:07.json b/GAI-LLM/Yi-Ko-6B-mixed-v11/result_2023-12-21 05:30:07.json new file mode 100644 index 0000000000000000000000000000000000000000..e95f945c3f4019d95b6f4b565d1d4e4a965d5fdf --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-mixed-v11/result_2023-12-21 05:30:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3575085324232082, + "acc_stderr": 0.014005494275916573, + "acc_norm": 0.4206484641638225, + "acc_norm_stderr": 0.014426211252508401 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3994224258115913, + "acc_stderr": 0.004887787255353492, + "acc_norm": 0.5336586337382991, + "acc_norm_stderr": 0.004978462690966916 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5504469987228607, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.5504469987228607, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863526 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5025641025641026, + "acc_stderr": 0.025350672979412188, + "acc_norm": 0.5025641025641026, + "acc_norm_stderr": 0.025350672979412188 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920938, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920938 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.634862385321101, + "acc_stderr": 0.020642801454383998, + "acc_norm": 0.634862385321101, + "acc_norm_stderr": 0.020642801454383998 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.02862441255016795, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02862441255016795 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 
0.043270409325787317, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787317 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763126, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763126 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.03121956944530185, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.03121956944530185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.01196531153657153, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.01196531153657153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.038783721137112745, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.038783721137112745 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557956, + "mc2": 0.4121524749596002, + "mc2_stderr": 0.014822327944942062 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5785123966942148, + "acc_stderr": 0.016977101932601518, + "acc_norm": 0.6269185360094451, + "acc_norm_stderr": 0.016627318275137443 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-mixed-v11", + "model_sha": "ca827b1389e67a8b8d8581d8f6f3accd4866ba11", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-smash-dpo/result_2023-12-29 06:56:37.json b/GAI-LLM/Yi-Ko-6B-smash-dpo/result_2023-12-29 06:56:37.json new file mode 100644 index 0000000000000000000000000000000000000000..c06377ce0e9b6f8ad87a837604e7af83b27a2f7a --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-smash-dpo/result_2023-12-29 06:56:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36689419795221845, + "acc_stderr": 0.014084133118104296, + "acc_norm": 0.41638225255972694, + "acc_norm_stderr": 0.014405618279436172 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4124676359290978, + "acc_stderr": 0.004912723848944785, + "acc_norm": 0.5473013343955387, + "acc_norm_stderr": 0.004967402792744853 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.017829131764287187, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.017829131764287187 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816503, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 
0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.036807836907275814, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.036807836907275814 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.034648816750163375, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.034648816750163375 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.025158266016868547, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.025158266016868547 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114982, + "acc_norm": 0.3037037037037037, 
+ "acc_norm_stderr": 0.028037929969114982 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.03468343295111126, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.03468343295111126 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283649, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283649 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342658, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342658 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637793, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637793 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.035339990940656964, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.035339990940656964 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6018348623853211, + "acc_stderr": 0.020987989422654257, + "acc_norm": 0.6018348623853211, + "acc_norm_stderr": 0.020987989422654257 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.02858034106513829, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.02858034106513829 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { 
+ "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024113, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024113 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320196, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320196 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.040073418097558065, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.040073418097558065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.032149521478027486, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.032149521478027486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409167, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409167 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483924, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31029986962190353, + "acc_stderr": 0.011815439293469832, + "acc_norm": 0.31029986962190353, + "acc_norm_stderr": 0.011815439293469832 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35495716034271724, + "mc1_stderr": 0.0167508623813759, + "mc2": 0.5208407477265208, + "mc2_stderr": 0.015859200141974766 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.017190246276231853, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-smash-dpo", + "model_sha": "a28f3cc3bcb5b30bdfbac46949399f3119a3ffd0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-smash/result_2023-12-28 04:54:01.json b/GAI-LLM/Yi-Ko-6B-smash/result_2023-12-28 04:54:01.json new file mode 100644 index 0000000000000000000000000000000000000000..d19bf0d63bc74d22247ab327cfa07f6ba69d36b9 --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-smash/result_2023-12-28 04:54:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179344, + "acc_norm": 0.4138225255972696, + "acc_norm_stderr": 0.014392730009221009 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3966341366261701, + "acc_stderr": 0.004881990487628915, + "acc_norm": 0.534654451304521, + "acc_norm_stderr": 0.004977782217582457 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.017855434554041986, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.017855434554041986 + }, 
+ "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.034273086529999365, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.034273086529999365 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649038, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649038 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + "acc_stderr": 0.02838474778881334, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.02838474778881334 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.02920254015343118, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.02920254015343118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + 
"acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911498, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911498 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.024419234966819064, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.024419234966819064 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723368, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723368 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873632, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873632 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.020728368457638494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + 
"acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.019944914136873583, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.019944914136873583 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0286638201471995, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0286638201471995 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842974, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842974 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557956, + "mc2": 0.41345739770630174, + "mc2_stderr": 0.014785029688685922 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5714285714285714, + "acc_stderr": 0.01701403811929749, + "acc_norm": 0.6186540731995277, + "acc_norm_stderr": 0.016699301768828084 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-smash", + "model_sha": "8b0f29ce0c792414e986f2c8b1fe59d68cb874cd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B_mixed_v10/result_2023-12-19 13:50:11.json b/GAI-LLM/Yi-Ko-6B_mixed_v10/result_2023-12-19 13:50:11.json new file mode 100644 index 0000000000000000000000000000000000000000..de2069446a9bf3f77f7f20adf473f0f9fad47ec7 --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B_mixed_v10/result_2023-12-19 13:50:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35494880546075086, + "acc_stderr": 0.013983036904094089, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.014471133392642475 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39852619000199163, + "acc_stderr": 0.004885942040894558, + "acc_norm": 0.5380402310296754, + "acc_norm_stderr": 0.004975319435777093 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + 
"acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.017758800534214414, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.017758800534214414 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49743589743589745, + "acc_stderr": 0.025350672979412202, + "acc_norm": 0.49743589743589745, + "acc_norm_stderr": 0.025350672979412202 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + "acc_stderr": 0.028384747788813336, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.028384747788813336 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.02860595370200425, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425082, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425082 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5246913580246914, + "acc_stderr": 0.027786800931427443, + "acc_norm": 0.5246913580246914, + "acc_norm_stderr": 0.027786800931427443 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.020828148517022603, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.020828148517022603 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089775, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375383, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402543, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402543 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301847, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704715997, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704715997 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087298, + "mc2": 0.41566543261161853, + "mc2_stderr": 0.014814793696578963 + 
}, + "harness|ko_commongen_v2|2": { + "acc": 0.5631641086186541, + "acc_stderr": 0.01705263355985607, + "acc_norm": 0.5914994096812278, + "acc_norm_stderr": 0.016900062879427122 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B_mixed_v10", + "model_sha": "5914204925b61986dfc048dcb78ccf5dc88d013e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/ko-en-llama2-13b-mixed-v10/result_2023-11-27 05:26:03.json b/GAI-LLM/ko-en-llama2-13b-mixed-v10/result_2023-11-27 05:26:03.json new file mode 100644 index 0000000000000000000000000000000000000000..26b4f2ff5eeaf41588d269cb2e52b1f49365b8d1 --- /dev/null +++ b/GAI-LLM/ko-en-llama2-13b-mixed-v10/result_2023-11-27 05:26:03.json @@ -0,0 
+1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910467, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4208325034853615, + "acc_stderr": 0.004926837572202161, + "acc_norm": 0.5668193586934873, + "acc_norm_stderr": 0.0049450236570322765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.01786209177850786, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.01786209177850786 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + 
"acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502734, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736413, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736413 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422704, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 
0.03582724530036094, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412232, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412232 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079105, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.02896370257079105 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698605, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698605 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + 
"acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.01557284045287583, + "mc2": 0.4226153874486002, + "mc2_stderr": 0.014807061474258669 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.017154073716682868, + "acc_norm": 0.6174734356552538, + "acc_norm_stderr": 0.016709165387228834 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/ko-en-llama2-13b-mixed-v10", + "model_sha": "e7ea3b47dd5fd7b9ce6573b18f9f5801f772017d", + "model_dtype": "torch.float16", + "lighteval_sha": "", 
+ "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/ko-en-llama2-13b-mixed-v3/result_2023-10-23 00:18:31.json b/GAI-LLM/ko-en-llama2-13b-mixed-v3/result_2023-10-23 00:18:31.json new file mode 100644 index 0000000000000000000000000000000000000000..ad4b7ab1471e58276331b390c2b7148ea94dbe05 --- /dev/null +++ b/GAI-LLM/ko-en-llama2-13b-mixed-v3/result_2023-10-23 00:18:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3728668941979522, + "acc_stderr": 0.01413117676013117, + "acc_norm": 0.42406143344709896, + "acc_norm_stderr": 0.014441889627464394 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40689105755825533, + "acc_stderr": 0.004902502514738606, + "acc_norm": 0.5433180641306513, + "acc_norm_stderr": 0.004971019942726589 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.01787469866749135, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.01787469866749135 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.030363582197238167, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.030363582197238167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840678, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840678 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.03135709599613591, + 
"acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.02432173848460237, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.02432173848460237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561053, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561053 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5683760683760684, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.5683760683760684, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142262, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 
0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44954128440366975, + "acc_stderr": 0.021327881417823363, + "acc_norm": 0.44954128440366975, + "acc_norm_stderr": 0.021327881417823363 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604675, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604675 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283693, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283693 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.018771683893528176, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.018771683893528176 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 
+ }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4810126582278481, + "acc_stderr": 0.03252375148090448, + "acc_norm": 0.4810126582278481, + "acc_norm_stderr": 0.03252375148090448 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29726205997392435, + "acc_stderr": 0.011673346173086034, + "acc_norm": 0.29726205997392435, + "acc_norm_stderr": 0.011673346173086034 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283347, + "mc2": 0.41687077666896594, + "mc2_stderr": 0.014804732810744745 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.017107618859549353, + "acc_norm": 0.538370720188902, + "acc_norm_stderr": 0.01713966022184555 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/ko-en-llama2-13b-mixed-v3", + "model_sha": "c3e43fecfbbd3adc1ea335de10e23b90452cf081", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/ko-en-llama2-13b-mixed-v4/result_2023-10-26 04:39:06.json b/GAI-LLM/ko-en-llama2-13b-mixed-v4/result_2023-10-26 04:39:06.json new file mode 100644 index 0000000000000000000000000000000000000000..ce04ab8ad7d5ee28a818114c36f569b880c92bb9 --- /dev/null +++ b/GAI-LLM/ko-en-llama2-13b-mixed-v4/result_2023-10-26 04:39:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407166, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137998 + }, + "harness|ko_hellaswag|10": { + "acc": 0.407787293367855, + "acc_stderr": 0.004904189257891276, + "acc_norm": 0.5450109539932284, + "acc_norm_stderr": 0.004969521827957934 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5019157088122606, + "acc_stderr": 0.017879832259026677, + "acc_norm": 0.5019157088122606, + "acc_norm_stderr": 0.017879832259026677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134986, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134986 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 
0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052445, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052445 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561056, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561056 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5811965811965812, + "acc_stderr": 0.03232128912157792, + "acc_norm": 0.5811965811965812, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.03058805297427065, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.03058805297427065 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159664, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655795, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.037466683254700206, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.037466683254700206 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389177, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389177 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.315359477124183, + "acc_stderr": 0.018798086284886887, + "acc_norm": 0.315359477124183, + "acc_norm_stderr": 0.018798086284886887 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631157, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631157 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560534, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.025187786660227276, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.025187786660227276 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.03254462010767859, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.03254462010767859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271824, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271824 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.01529807750948508, + "mc2": 0.42398241596571024, + "mc2_stderr": 0.014807345195706319 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836442, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.01705775370216029 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/ko-en-llama2-13b-mixed-v4", + "model_sha": "2d3a564cd23d0e97bb0f3354a148ef57e313661a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/ko-en-llama2-13b-mixed-v5/result_2023-10-28 07:24:05.json b/GAI-LLM/ko-en-llama2-13b-mixed-v5/result_2023-10-28 07:24:05.json new file mode 100644 index 0000000000000000000000000000000000000000..99ec9e0086391dbc59d9412fa2d1c2820d3c4126 --- /dev/null +++ b/GAI-LLM/ko-en-llama2-13b-mixed-v5/result_2023-10-28 07:24:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111726, + "acc_norm": 0.431740614334471, + "acc_norm_stderr": 0.014474591427196202 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4082852021509659, + "acc_stderr": 0.00490511903984946, + "acc_norm": 0.5455088627763394, + "acc_norm_stderr": 0.004969070188763755 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5019157088122606, + "acc_stderr": 0.017879832259026677, + "acc_norm": 0.5019157088122606, + "acc_norm_stderr": 0.017879832259026677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010591, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.02832032583010591 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134986, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 
0.03210062154134986 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413925, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413925 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.024503472557110956, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.024503472557110956 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.02834378725054064, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.02834378725054064 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5726495726495726, + "acc_stderr": 0.032408473935163266, + "acc_norm": 0.5726495726495726, + "acc_norm_stderr": 0.032408473935163266 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0365634365335316, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0365634365335316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.430635838150289, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.430635838150289, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656206, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656206 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44587155963302755, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.44587155963302755, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.02807415894760066, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706207, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706207 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516994, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 
0.03114144782353605, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353605 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.025035845227711247, + "acc_norm": 0.21691176470588236, + "acc_norm_stderr": 0.025035845227711247 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.510548523206751, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.510548523206751, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271824, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271824 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156463, + "mc2": 0.4218804524380662, + "mc2_stderr": 0.01476995927431319 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.5584415584415584, + "acc_norm_stderr": 0.017072525875563106 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/ko-en-llama2-13b-mixed-v5", + "model_sha": "6e6de7e1907464bd5dc9c9c9fd312983a0611cfb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-dpo-v1/result_2023-11-20 09:28:18.json b/GAI-LLM/llama-2-koen-13b-dpo-v1/result_2023-11-20 09:28:18.json new file mode 100644 index 0000000000000000000000000000000000000000..6d3d9904a2a25aa833af91019d3e7616776120aa --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-dpo-v1/result_2023-11-20 09:28:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38139931740614336, + "acc_stderr": 0.014194389086685247, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.014537144444284745 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41744672376020714, + "acc_stderr": 0.004921300331285571, + "acc_norm": 0.5662218681537542, + "acc_norm_stderr": 0.004945824056501814 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365776, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365776 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.01786209177850786, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.01786209177850786 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 
0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010591, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.02832032583010591 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4128205128205128, + "acc_stderr": 0.02496268356433182, + "acc_norm": 0.4128205128205128, + "acc_norm_stderr": 0.02496268356433182 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849734, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 
0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.02143295620345332, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.02143295620345332 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147124, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147124 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.019450768432505528, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.019450768432505528 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022135 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.0317987634217685, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.0317987634217685 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03032024326500413, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03032024326500413 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2835723598435463, + "acc_stderr": 0.011511900775968332, + "acc_norm": 0.2835723598435463, + "acc_norm_stderr": 0.011511900775968332 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247271, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247271 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015016, + "mc2": 0.4243531644307249, + "mc2_stderr": 0.014820029237903914 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654273, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.016756921571069425 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-dpo-v1", + "model_sha": "c5b1d6d2c7e6d6d943453c1ea6e255ecf03a0fe4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-dpo-v2/result_2023-11-30 02:23:53.json b/GAI-LLM/llama-2-koen-13b-dpo-v2/result_2023-11-30 02:23:53.json new file mode 100644 index 0000000000000000000000000000000000000000..4994c6f4eb19345d18461bc334209ed09080c4ef --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-dpo-v2/result_2023-11-30 02:23:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910467, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4208325034853615, + "acc_stderr": 0.004926837572202161, + "acc_norm": 0.5668193586934873, + "acc_norm_stderr": 0.0049450236570322765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.01786209177850786, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.01786209177850786 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + 
"acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502734, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736413, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736413 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422704, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412232, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412232 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079105, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.02896370257079105 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698605, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698605 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.01557284045287583, + "mc2": 0.4226153874486002, + "mc2_stderr": 0.014807061474258669 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.017154073716682868, + "acc_norm": 0.6174734356552538, + "acc_norm_stderr": 0.016709165387228834 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-dpo-v2", + "model_sha": "34736dec8153702c2c07e0265b702a29ca65178c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-dpo-v3/result_2023-12-05 00:42:00.json b/GAI-LLM/llama-2-koen-13b-dpo-v3/result_2023-12-05 00:42:00.json new file mode 100644 index 0000000000000000000000000000000000000000..bfab5930f4c10ecf440111f9410ad916e9796655 --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-dpo-v3/result_2023-12-05 00:42:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910467, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4208325034853615, + "acc_stderr": 0.004926837572202161, + "acc_norm": 0.5668193586934873, + "acc_norm_stderr": 0.0049450236570322765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 
0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.01786209177850786, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.01786209177850786 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.43548387096774194, + "acc_stderr": 0.028206225591502734, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736413, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736413 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422704, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, 
+ "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412232, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412232 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079105, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.02896370257079105 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698605, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698605 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.01557284045287583, + "mc2": 0.4226153874486002, + "mc2_stderr": 0.014807061474258669 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + 
"acc_stderr": 0.017154073716682868, + "acc_norm": 0.6174734356552538, + "acc_norm_stderr": 0.016709165387228834 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-dpo-v3", + "model_sha": "ab13dffe5b9091d09383c3f57cff37e0503a7dc5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-dpo-v3_2/result_2023-12-12 05:55:00.json b/GAI-LLM/llama-2-koen-13b-dpo-v3_2/result_2023-12-12 05:55:00.json new file mode 100644 index 0000000000000000000000000000000000000000..bba24e7be7db796d3cf9f934a64c1844652234c4 --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-dpo-v3_2/result_2023-12-12 05:55:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { 
+ "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938215, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 0.014553749939306864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41963752240589525, + "acc_stderr": 0.00492491043310636, + "acc_norm": 0.5681139215295757, + "acc_norm_stderr": 0.0049432643398686525 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.01785041079438017, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.01785041079438017 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840684, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840684 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.024666744915187232, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.024666744915187232 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + 
"acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347357, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347357 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261107, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261107 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + 
"acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5266055045871559, + "acc_stderr": 0.021406952688151588, + "acc_norm": 0.5266055045871559, + "acc_norm_stderr": 0.021406952688151588 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392868, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392868 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924803, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924803 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503807, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503807 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791044, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28748370273794005, + "acc_stderr": 0.01155933735570851, + "acc_norm": 0.28748370273794005, + "acc_norm_stderr": 0.01155933735570851 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.034267123492472705, + "acc_norm": 
0.39215686274509803, + "acc_norm_stderr": 0.034267123492472705 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777305, + "mc2": 0.4588099760274036, + "mc2_stderr": 0.015066495237883525 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5572609208972845, + "acc_stderr": 0.01707725413155622, + "acc_norm": 0.6469893742621016, + "acc_norm_stderr": 0.01643074598242715 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-dpo-v3_2", + "model_sha": "56d6c983276c785d9af25d028abc39d04fbd31d2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + 
"num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-mixed-v11/result_2023-12-06 04:09:10.json b/GAI-LLM/llama-2-koen-13b-mixed-v11/result_2023-12-06 04:09:10.json new file mode 100644 index 0000000000000000000000000000000000000000..a6bd0f6907256a6ea5a8c123a4a161c59e175d0b --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-mixed-v11/result_2023-12-06 04:09:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979274, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.014537144444284746 + }, + "harness|ko_hellaswag|10": { + "acc": 0.418442541326429, + "acc_stderr": 0.004922953651577685, + "acc_norm": 0.5646285600477993, + "acc_norm_stderr": 0.004947922692688838 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5210727969348659, + "acc_stderr": 0.017864076786212907, + "acc_norm": 0.5210727969348659, + "acc_norm_stderr": 0.017864076786212907 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 
0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.024915243985987844, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.024915243985987844 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392868, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392868 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577447, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577447 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114024, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114024 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.042032772914677614, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.042032772914677614 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012386, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012386 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29335071707953064, + "acc_stderr": 0.011628520449582076, + "acc_norm": 0.29335071707953064, + "acc_norm_stderr": 0.011628520449582076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087295, + "mc2": 0.4223073081878361, + "mc2_stderr": 0.01480298314658298 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5230224321133412, + "acc_stderr": 0.01717212154672763, + "acc_norm": 0.5914994096812278, + "acc_norm_stderr": 0.016900062879427122 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, 
+ "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-mixed-v11", + "model_sha": "d309fbc21d29f4cfb41d2506c406244cb11e78f1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-mixed-v11_2/result_2023-12-11 01:53:50.json b/GAI-LLM/llama-2-koen-13b-mixed-v11_2/result_2023-12-11 01:53:50.json new file mode 100644 index 0000000000000000000000000000000000000000..a587b2f51da018096277ec50c700fb9760546cf5 --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-mixed-v11_2/result_2023-12-11 01:53:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38139931740614336, + "acc_stderr": 0.014194389086685247, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41963752240589525, + "acc_stderr": 0.004924910433106359, + "acc_norm": 0.566122286397132, + "acc_norm_stderr": 0.004945956744943813 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5351213282247765, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.5351213282247765, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.024838811988033158, + "acc_norm": 0.4, + "acc_norm_stderr": 0.024838811988033158 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717862, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717862 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + 
"acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.02138786335035399, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.02138786335035399 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488795, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488795 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281508, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281508 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044791, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044791 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 
0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464622, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464622 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29595827900912647, + "acc_stderr": 0.011658518525277054, + "acc_norm": 0.29595827900912647, + "acc_norm_stderr": 0.011658518525277054 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842885, + "mc2": 0.42557508687226114, + "mc2_stderr": 0.014810504388914819 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.525383707201889, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.6092089728453365, + "acc_norm_stderr": 0.016775298465108265 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-mixed-v11_2", + "model_sha": "da7c55c72f9f911022709d710972972beef327a4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-mixed-v7/result_2023-11-03 01:38:04.json b/GAI-LLM/llama-2-koen-13b-mixed-v7/result_2023-11-03 01:38:04.json new file mode 100644 index 0000000000000000000000000000000000000000..b28d107e4df9eecbd6b2444c601558baf28e068a --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-mixed-v7/result_2023-11-03 01:38:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3796928327645051, + "acc_stderr": 0.014182119866974874, + "acc_norm": 0.44795221843003413, + "acc_norm_stderr": 0.014532011498211669 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4171479784903406, + "acc_stderr": 0.004920800313232745, + "acc_norm": 0.5664210316669986, + "acc_norm_stderr": 0.004945558069852528 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365776, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365776 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.01786209177850786, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.01786209177850786 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102315, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833713, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833713 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 
0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.02143295620345332, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.02143295620345332 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.019450768432505528, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.019450768432505528 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022135 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.0317987634217685, + "acc_norm": 0.3194444444444444, + 
"acc_norm_stderr": 0.0317987634217685 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.46691176470588236, + "acc_stderr": 0.030306257722468314, + "acc_norm": 0.46691176470588236, + "acc_norm_stderr": 0.030306257722468314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.03241920684693334, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.03241920684693334 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28683181225554105, + "acc_stderr": 0.011551504781176935, + "acc_norm": 0.28683181225554105, + "acc_norm_stderr": 0.011551504781176935 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247271, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247271 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015016, + "mc2": 0.42428555727008455, + "mc2_stderr": 0.014819025436428698 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5112160566706021, + "acc_stderr": 0.01718602846948929, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.016738130760321757 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-mixed-v7", + "model_sha": "8584de207645e9fed63c76e8e4718fb46e393c3c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-mixed-v8/result_2023-11-08 10:03:55.json b/GAI-LLM/llama-2-koen-13b-mixed-v8/result_2023-11-08 10:03:55.json new file mode 100644 index 0000000000000000000000000000000000000000..dffefdf38678b85a6811d48a699bf24e404f9ed6 --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-mixed-v8/result_2023-11-08 10:03:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38139931740614336, + "acc_stderr": 0.014194389086685251, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580123 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41326428998207526, + "acc_stderr": 0.004914130855431776, + "acc_norm": 0.5622385978888668, + "acc_norm_stderr": 0.0049509732311887366 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.017870847506081734, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.017870847506081734 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, 
+ "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.025174048384000777, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.025174048384000777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561056, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561056 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.031426169937919246, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.031426169937919246 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + 
"acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489358, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489358 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + 
"acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.011717148751648436, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.011717148751648436 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156474, + "mc2": 0.40988199072836734, + "mc2_stderr": 0.014730658051782728 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4592680047225502, + "acc_stderr": 0.01713321827653767, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.017115418225226872 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-mixed-v8", + "model_sha": "bc460419cfb0d80c3078ebedf761d8fa69e09eeb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-mixed-v9/result_2023-11-16 04:14:01.json b/GAI-LLM/llama-2-koen-13b-mixed-v9/result_2023-11-16 04:14:01.json new file mode 100644 index 0000000000000000000000000000000000000000..c3585a04358b79fea2a40235153280defb181d9b --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-mixed-v9/result_2023-11-16 04:14:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000326, + "acc_norm": 0.447098976109215, + "acc_norm_stderr": 0.014529380160526842 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4170483967337184, + "acc_stderr": 0.004920633227844466, + "acc_norm": 0.5650268870742879, + "acc_norm_stderr": 0.004947402907996248 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365776, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365776 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5236270753512133, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.5236270753512133, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + 
"acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.0382840111507902, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.0382840111507902 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.032284106267163895, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461224, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461224 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + 
"acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577657, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577657 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831027, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831027 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5174311926605505, + "acc_stderr": 0.02142429187185315, + "acc_norm": 0.5174311926605505, + "acc_norm_stderr": 0.02142429187185315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824096, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824096 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.01955964680921592, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.01955964680921592 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298804, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03032024326500413, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03032024326500413 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.032335327775334835, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2953063885267275, + "acc_stderr": 0.011651061936208828, + "acc_norm": 0.2953063885267275, + "acc_norm_stderr": 0.011651061936208828 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719128, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719128 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237024, + "mc2": 0.42083991681473415, + "mc2_stderr": 0.014797986734621882 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077304, + "acc_norm": 0.6068476977567887, + "acc_norm_stderr": 0.01679326280128707 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-mixed-v9", + "model_sha": "0d3fe7df627660f041bd73a62362898e05b67196", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/polyglot-12.8b-mixed-v3/result_2023-10-26 01:40:33.json b/GAI-LLM/polyglot-12.8b-mixed-v3/result_2023-10-26 01:40:33.json new file mode 100644 index 0000000000000000000000000000000000000000..49ae4676386049263d858c18e9af37e6f14a2c5b --- /dev/null +++ b/GAI-LLM/polyglot-12.8b-mixed-v3/result_2023-10-26 01:40:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537365, + "acc_norm": 0.33447098976109213, + "acc_norm_stderr": 0.013787460322441374 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38548097988448515, + "acc_stderr": 0.004857140410776749, + "acc_norm": 0.5028878709420435, + "acc_norm_stderr": 0.004989698183207817 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393161, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393161 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + 
"acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.015671006009339572, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.015671006009339572 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039787, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039787 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.02631185807185416, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.02631185807185416 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03053289223393203, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03053289223393203 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.038061426873099935, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.038061426873099935 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868963, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868963 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026928, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026928 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.0309037969521145, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.0309037969521145 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239963, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239963 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.23076923076923078, + "acc_stderr": 0.027601921381417604, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.027601921381417604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.026199808807561932, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.026199808807561932 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935554, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935554 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776578, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776578 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.036539469694421, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.036539469694421 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.022698657167855716, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.022698657167855716 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886338, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886338 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26422018348623855, + "acc_stderr": 0.0189041641715102, + "acc_norm": 0.26422018348623855, + "acc_norm_stderr": 0.0189041641715102 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + 
"acc_stderr": 0.02505850331695815, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.02505850331695815 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.20098039215686275, + "acc_stderr": 0.016211938889655574, + "acc_norm": 0.20098039215686275, + "acc_norm_stderr": 0.016211938889655574 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.02498710636564298, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.02498710636564298 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.026882144922307748, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.026882144922307748 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.32489451476793246, + "acc_stderr": 0.030486039389105303, + "acc_norm": 0.32489451476793246, + "acc_norm_stderr": 0.030486039389105303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25749674054758803, + "acc_stderr": 0.011167706014904138, + "acc_norm": 0.25749674054758803, + "acc_norm_stderr": 0.011167706014904138 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.032876667586034886, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.032876667586034886 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731613, + "mc2": 0.39066594086735945, + "mc2_stderr": 0.014736605286215685 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30932703659976385, + "acc_stderr": 0.01589132050552089, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.0168363772928493 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/polyglot-12.8b-mixed-v3", + "model_sha": "ab3c7b46c35cebb556b448db959d885a99f31220", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GUSSSSSSSSSSS/polyglot-ko-12.8b-instruction/result_2023-11-06 04:46:28.json b/GUSSSSSSSSSSS/polyglot-ko-12.8b-instruction/result_2023-11-06 04:46:28.json new file mode 100644 index 0000000000000000000000000000000000000000..705573b694e9d93486430966533a58f3cf1388b5 --- /dev/null +++ b/GUSSSSSSSSSSS/polyglot-ko-12.8b-instruction/result_2023-11-06 04:46:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24829351535836178, + "acc_stderr": 0.012624912868089764, + "acc_norm": 0.2858361774744027, + "acc_norm_stderr": 0.013203196088537369 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.35371439952200756, + "acc_stderr": 0.004771447244095125, + "acc_norm": 0.4420434176458873, + "acc_norm_stderr": 0.004956147046108963 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038245, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.034678266857038245 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.040580420156460344, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.040580420156460344 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2681992337164751, + "acc_stderr": 0.015842430835269445, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.015842430835269445 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678317, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678317 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.027678452578212373, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.027678452578212373 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064536, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064536 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.18385650224215247, + "acc_stderr": 0.025998379092356513, + "acc_norm": 0.18385650224215247, + "acc_norm_stderr": 0.025998379092356513 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774632, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774632 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.10784313725490197, + "acc_stderr": 0.030864282122060136, + "acc_norm": 0.10784313725490197, + "acc_norm_stderr": 0.030864282122060136 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.02788682807838056, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.02788682807838056 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24102564102564103, + "acc_stderr": 0.0216855466653332, + "acc_norm": 0.24102564102564103, + "acc_norm_stderr": 0.0216855466653332 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 
0.03893542518824847, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.03893542518824847 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21182266009852216, + "acc_stderr": 0.028748983689941072, + "acc_norm": 0.21182266009852216, + "acc_norm_stderr": 0.028748983689941072 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.025091892378859275, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.025091892378859275 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106727, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106727 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833713, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833713 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31343283582089554, + "acc_stderr": 0.03280188205348642, + "acc_norm": 0.31343283582089554, + "acc_norm_stderr": 0.03280188205348642 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483098, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483098 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036622, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036622 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.023618678310069363, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.023618678310069363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.025842248700902168, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.025842248700902168 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 
0.037124548537213684, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.037124548537213684 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21100917431192662, + "acc_stderr": 0.01749392240411265, + "acc_norm": 0.21100917431192662, + "acc_norm_stderr": 0.01749392240411265 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.024630048979824765, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.024630048979824765 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.20394736842105263, + "acc_stderr": 0.032790004063100515, + "acc_norm": 0.20394736842105263, + "acc_norm_stderr": 0.032790004063100515 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25326797385620914, + "acc_stderr": 0.01759348689536683, + "acc_norm": 0.25326797385620914, + "acc_norm_stderr": 0.01759348689536683 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729903, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729903 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079103, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.02896370257079103 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28156424581005585, + "acc_stderr": 0.015042290171866132, + "acc_norm": 0.28156424581005585, + "acc_norm_stderr": 0.015042290171866132 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225418, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225418 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.02500025603954621, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.02500025603954621 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24185136897001303, + "acc_stderr": 0.010936550813827054, + "acc_norm": 0.24185136897001303, + "acc_norm_stderr": 0.010936550813827054 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.2606060606060606, + "acc_stderr": 0.03427743175816525, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816525 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22399020807833536, + "mc1_stderr": 0.014594964329474203, + "mc2": 0.4106638009419967, + "mc2_stderr": 0.015724386722290755 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3187721369539551, + "acc_stderr": 0.01602142705530959, + "acc_norm": 0.38488783943329397, + "acc_norm_stderr": 0.01672857970149866 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GUSSSSSSSSSSS/polyglot-ko-12.8b-instruction", + "model_sha": "5dd983e0688b676b814f4b9a02810de2d31dafb3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/HAERAE-HUB/hae-tae_v0.1.1/result_2023-09-30 11:46:43.json b/HAERAE-HUB/hae-tae_v0.1.1/result_2023-09-30 11:46:43.json new file mode 100644 index 0000000000000000000000000000000000000000..f108fe44a38bc3752e38bae2396c9767a3ab3508 --- /dev/null +++ b/HAERAE-HUB/hae-tae_v0.1.1/result_2023-09-30 11:46:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28242320819112626, + "acc_stderr": 0.01315545688409722, + "acc_norm": 0.3302047781569966, + "acc_norm_stderr": 0.013743085603760422 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3719378609838678, + "acc_stderr": 0.004823341569605419, + "acc_norm": 0.4821748655646286, + "acc_norm_stderr": 0.0049866095427490405 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041693, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041693 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20051085568326948, + "acc_stderr": 0.014317653708594209, + "acc_norm": 0.20051085568326948, + "acc_norm_stderr": 0.014317653708594209 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.16170212765957448, + "acc_stderr": 0.02406850528969531, + "acc_norm": 0.16170212765957448, + "acc_norm_stderr": 0.02406850528969531 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.030709824050565264, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.030709824050565264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11659192825112108, + "acc_stderr": 0.021539639816244467, + "acc_norm": 0.11659192825112108, + "acc_norm_stderr": 0.021539639816244467 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.36153846153846153, + "acc_stderr": 0.024359581465396983, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.024359581465396983 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443866, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443866 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163334, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163334 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + 
"acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.344954128440367, + "acc_stderr": 0.02038060540506697, + "acc_norm": 0.344954128440367, + "acc_norm_stderr": 0.02038060540506697 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.02609016250427905, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.02609016250427905 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.15702479338842976, + "acc_stderr": 0.03321244842547128, + "acc_norm": 0.15702479338842976, + "acc_norm_stderr": 0.03321244842547128 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953776, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953776 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.033213611069662696, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.033213611069662696 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036847, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036847 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3183673469387755, + "acc_stderr": 0.02982253379398209, + "acc_norm": 0.3183673469387755, + "acc_norm_stderr": 0.02982253379398209 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20675105485232068, + "acc_stderr": 0.026361651668389087, + 
"acc_norm": 0.20675105485232068, + "acc_norm_stderr": 0.026361651668389087 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2470664928292047, + "acc_stderr": 0.011015752255279329, + "acc_norm": 0.2470664928292047, + "acc_norm_stderr": 0.011015752255279329 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156475, + "mc2": 0.3974526680083883, + "mc2_stderr": 0.01475058288914894 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29279811097992914, + "acc_stderr": 0.015644823205401334, + "acc_norm": 0.3565525383707202, + "acc_norm_stderr": 0.016467706981527445 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HAERAE-HUB/hae-tae_v0.1.1", + "model_sha": "4ae77d9659bb11f158180f4b8b243d1e9ddb51f4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HAERAE-HUB/hae-tae_v0.1.2/result_2023-09-30 11:46:34.json b/HAERAE-HUB/hae-tae_v0.1.2/result_2023-09-30 11:46:34.json new file mode 100644 index 0000000000000000000000000000000000000000..ded436dde002f88280335e6dd939b6cdfc13cb89 --- /dev/null +++ b/HAERAE-HUB/hae-tae_v0.1.2/result_2023-09-30 11:46:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2909556313993174, + "acc_stderr": 0.01327307786590758, + "acc_norm": 0.3302047781569966, + "acc_norm_stderr": 0.013743085603760427 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37442740489942244, + "acc_stderr": 0.004829856058603579, + "acc_norm": 0.481876120294762, + "acc_norm_stderr": 0.00498650229693118 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20434227330779056, + "acc_stderr": 0.014419123980931906, + "acc_norm": 0.20434227330779056, + "acc_norm_stderr": 0.014419123980931906 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.03547854198560826, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.03547854198560826 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.15, + "acc_stderr": 0.035887028128263714, + "acc_norm": 0.15, + "acc_norm_stderr": 0.035887028128263714 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.030709824050565264, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.030709824050565264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.1031390134529148, + "acc_stderr": 0.020412564289839272, + "acc_norm": 0.1031390134529148, + "acc_norm_stderr": 0.020412564289839272 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.03383201223244441, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.03383201223244441 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + 
"acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.048108401480826346, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.048108401480826346 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483724, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483724 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695248, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695248 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959905, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959905 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525214, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525214 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 
+ }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.344954128440367, + "acc_stderr": 0.02038060540506697, + "acc_norm": 0.344954128440367, + "acc_norm_stderr": 0.02038060540506697 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279053, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279053 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.016819028375736386, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736386 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.02564555362226673, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.02564555362226673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3346938775510204, + "acc_stderr": 0.030209235226242314, + "acc_norm": 0.3346938775510204, + "acc_norm_stderr": 0.030209235226242314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.19831223628691982, + "acc_stderr": 0.02595502084162111, + "acc_norm": 0.19831223628691982, + "acc_norm_stderr": 0.02595502084162111 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842885, + "mc2": 0.420854027075679, + "mc2_stderr": 0.014933313137954875 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3577331759149941, + "acc_stderr": 0.016479808935749976, + "acc_norm": 0.45808736717827625, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HAERAE-HUB/hae-tae_v0.1.2", + "model_sha": "fd9094c0e91bcb07ecf2b89b36a16480e27a93dc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HY-KDPARK/llama-2-koen-13b-dpo-v0.4/result_2023-12-16 09:24:56.json b/HY-KDPARK/llama-2-koen-13b-dpo-v0.4/result_2023-12-16 09:24:56.json new file mode 100644 index 0000000000000000000000000000000000000000..446d504281cc63153bedc511a7ee84650127cb76 --- /dev/null +++ b/HY-KDPARK/llama-2-koen-13b-dpo-v0.4/result_2023-12-16 09:24:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.01411129875167495, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.01447113339264247 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41455885281816374, + "acc_stderr": 0.004916388962142332, + "acc_norm": 0.5623381796454889, + "acc_norm_stderr": 0.004950848456984546 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.017874698667491345, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.017874698667491345 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956278, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956278 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 
0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.03163145807552379, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.03163145807552379 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.028156036538233217, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.028156036538233217 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844058, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844058 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 
0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831027, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831027 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.02762873715566877, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.02762873715566877 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.027996723180631455, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.027996723180631455 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.019249785691717217, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.019249785691717217 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460997, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460997 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298825, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298825 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.011717148751648431, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.011717148751648431 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719128, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719128 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627884, + "mc2": 0.43136545246089486, + "mc2_stderr": 0.014881985381415318 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HY-KDPARK/llama-2-koen-13b-dpo-v0.4", + "model_sha": "a3cd8b7790f43c87f36f7e7289a1a210102dd26f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HY-KDPARK/llama-2-koen-13b-sft-v0.1/result_2023-11-28 04:36:29.json b/HY-KDPARK/llama-2-koen-13b-sft-v0.1/result_2023-11-28 04:36:29.json new file mode 100644 index 0000000000000000000000000000000000000000..ce390c21623e6c096b2b59ccb665e48db02d2128 --- /dev/null +++ b/HY-KDPARK/llama-2-koen-13b-sft-v0.1/result_2023-11-28 04:36:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.01359243151906808, + "acc_norm": 0.363481228668942, + "acc_norm_stderr": 0.014056207319068287 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36974706233817967, + "acc_stderr": 0.004817495546789546, + "acc_norm": 0.47450707030472017, + "acc_norm_stderr": 0.00498329157828904 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4623243933588761, + "acc_stderr": 0.017829131764287187, + "acc_norm": 0.4623243933588761, + "acc_norm_stderr": 0.017829131764287187 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 
0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.02823776942208533, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.02823776942208533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.0332319730294294, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.0332319730294294 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.024283140529467295, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.024283140529467295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3903225806451613, + "acc_stderr": 0.02775125663696958, + "acc_norm": 0.3903225806451613, + "acc_norm_stderr": 0.02775125663696958 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5641025641025641, + "acc_stderr": 0.03248577511578401, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.03248577511578401 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.02983280811479601, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.02983280811479601 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.02549753263960955, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.02549753263960955 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.417910447761194, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.417910447761194, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02306818884826111, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02306818884826111 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3786127167630058, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.3786127167630058, + "acc_norm_stderr": 0.02611374936131034 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3950617283950617, + "acc_stderr": 0.027201117666925647, + "acc_norm": 0.3950617283950617, + "acc_norm_stderr": 0.027201117666925647 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43302752293577984, + "acc_stderr": 0.021244146569074338, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.021244146569074338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.0275300784471103, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.0275300784471103 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577443, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577443 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.02813968944485967, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.02813968944485967 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225606, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225606 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670736, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670736 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.32653061224489793, + "acc_stderr": 0.03002105623844031, + "acc_norm": 0.32653061224489793, + "acc_norm_stderr": 0.03002105623844031 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4978902953586498, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.4978902953586498, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28292046936114734, + "acc_stderr": 0.011503891323188978, + "acc_norm": 0.28292046936114734, + "acc_norm_stderr": 0.011503891323188978 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361005, + "mc2": 0.4736196468171595, + "mc2_stderr": 0.016592688559874832 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36363636363636365, + "acc_stderr": 0.016538691603327715, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.01687694116504561 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HY-KDPARK/llama-2-koen-13b-sft-v0.1", + "model_sha": "3b75cac58ea131920b39541547815d14caaa7082", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HY-KDPARK/llama-2-koen-13b-sft-v0.3/result_2023-12-10 07:14:00.json b/HY-KDPARK/llama-2-koen-13b-sft-v0.3/result_2023-12-10 07:14:00.json new file mode 100644 index 0000000000000000000000000000000000000000..94190194dfb07b706cd068158f7697bdb476d0b9 --- /dev/null +++ b/HY-KDPARK/llama-2-koen-13b-sft-v0.3/result_2023-12-10 07:14:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4044368600682594, + "acc_stderr": 0.014342036483436175, + "acc_norm": 0.4726962457337884, + "acc_norm_stderr": 0.014589589101985998 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4191396136227843, + "acc_stderr": 0.004924098711864585, + "acc_norm": 0.5668193586934873, + "acc_norm_stderr": 0.0049450236570322765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5964912280701754, + "acc_stderr": 0.03762738699917057, + "acc_norm": 0.5964912280701754, + "acc_norm_stderr": 0.03762738699917057 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464245, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464245 + 
}, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.0282863240755644, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.0282863240755644 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, 
+ "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594384, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594384 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5908256880733945, + "acc_stderr": 0.021080670264433738, + "acc_norm": 0.5908256880733945, + "acc_norm_stderr": 0.021080670264433738 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 
0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639882, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319464, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125474, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125474 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.011940264193195986, + "acc_norm": 0.3226857887874837, + "acc_norm_stderr": 0.011940264193195986 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777305, + "mc2": 0.442704104876821, + "mc2_stderr": 0.015215337318397937 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4462809917355372, + "acc_stderr": 0.017090852631668332, + "acc_norm": 0.4887839433293979, + "acc_norm_stderr": 0.017186028469489287 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HY-KDPARK/llama-2-koen-13b-sft-v0.3", + "model_sha": "5130b6ccb175caaddd0812cfc2f8b1fd3bfe4ae4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_Orca_16_32/result_2023-11-20 12:28:29.json b/HanaGroup/Mini_Orca_16_32/result_2023-11-20 12:28:29.json new file mode 100644 index 0000000000000000000000000000000000000000..35408fd8db04c16af3563e1b2ec0594976ab5895 --- /dev/null +++ b/HanaGroup/Mini_Orca_16_32/result_2023-11-20 12:28:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.439419795221843, + "acc_stderr": 0.014503747823580127, + "acc_norm": 0.4778156996587031, + "acc_norm_stderr": 0.014597001927076136 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4076877116112328, + "acc_stderr": 0.00490400267618433, + "acc_norm": 0.5386377215694085, + "acc_norm_stderr": 0.0049748608784644325 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.03786720706234214, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + 
"acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5721583652618135, + "acc_stderr": 0.017692787927803724, + "acc_norm": 0.5721583652618135, + "acc_norm_stderr": 0.017692787927803724 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056127, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056127 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 
0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006114, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006114 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.02535574126305527, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.02535574126305527 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353996, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353996 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { 
+ "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2122905027932961, + "acc_stderr": 0.013676644685831725, + "acc_norm": 0.2122905027932961, + "acc_norm_stderr": 0.013676644685831725 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824873, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824873 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353592, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353592 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713606, + "mc2": 0.4699398119482503, + "mc2_stderr": 0.015489346893307833 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.01717567127983645, + "acc_norm": 0.5218417945690673, + "acc_norm_stderr": 0.017173944474294378 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_Orca_16_32", + "model_sha": "1356bee33d15e26ae9738a179058f993134f6141", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_Test_orca01/result_2023-11-11 01:14:39.json b/HanaGroup/Mini_Test_orca01/result_2023-11-11 01:14:39.json new file mode 100644 index 0000000000000000000000000000000000000000..3c3283f13972057a1db883418227686dd283c7c6 --- /dev/null +++ b/HanaGroup/Mini_Test_orca01/result_2023-11-11 01:14:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000322, + "acc_norm": 0.4325938566552901, + "acc_norm_stderr": 0.014478005694182533 + }, + "harness|ko_hellaswag|10": { 
+ "acc": 0.3935471021708823, + "acc_stderr": 0.004875379352079816, + "acc_norm": 0.5049790878311093, + "acc_norm_stderr": 0.0049895339988203545 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.01787084750608172, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.01787084750608172 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 
0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507755, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507755 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.03056159042673183, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520196, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520196 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668767, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 
0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577457, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577457 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639875, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639875 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.01500576244678616, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.01500576244678616 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398865, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398865 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + 
"acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.4404103175289405, + "mc2_stderr": 0.015432051294700285 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3754427390791027, + "acc_stderr": 0.01664841158951109, + "acc_norm": 0.4344746162927981, + "acc_norm_stderr": 0.017042098620824942 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_Test_orca01", + "model_sha": "c85ec5844cab07c96f6b54292f26bea5e252f3c8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_category/result_2023-11-08 23:58:45.json 
b/HanaGroup/Mini_category/result_2023-11-08 23:58:45.json new file mode 100644 index 0000000000000000000000000000000000000000..8975c6d74b49087c105df144014b1502e5cf02a7 --- /dev/null +++ b/HanaGroup/Mini_category/result_2023-11-08 23:58:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3387372013651877, + "acc_stderr": 0.013830568927974332, + "acc_norm": 0.3890784982935154, + "acc_norm_stderr": 0.014247309976045607 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3805018920533758, + "acc_stderr": 0.0048451800342716265, + "acc_norm": 0.48297151961760604, + "acc_norm_stderr": 0.004986886806565639 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.454661558109834, + "acc_stderr": 0.017806304585052606, + "acc_norm": 0.454661558109834, + "acc_norm_stderr": 0.017806304585052606 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.0253480060315348, + "acc_norm": 
0.49230769230769234, + "acc_norm_stderr": 0.0253480060315348 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490385, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269995, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269995 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833942, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833942 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45504587155963305, + "acc_stderr": 0.021350503090925167, + "acc_norm": 0.45504587155963305, + "acc_norm_stderr": 0.021350503090925167 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.038607315993160904, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.038607315993160904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.019780465954777508, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.019780465954777508 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861131, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861131 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.293854748603352, + "acc_stderr": 0.015235075776719608, + "acc_norm": 0.293854748603352, + "acc_norm_stderr": 0.015235075776719608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411962, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411962 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.032006820201639065, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.032006820201639065 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.32073011734028684, + "acc_stderr": 0.011921199991782613, + "acc_norm": 0.32073011734028684, + "acc_norm_stderr": 0.011921199991782613 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.01596440096558967, + "mc2": 0.4614845426101113, + "mc2_stderr": 0.015908282639721598 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429246, + "acc_norm": 0.4970484061393152, + "acc_norm_stderr": 0.017190054580194694 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_category", + "model_sha": "15482113d5d33f4a677f49741dce3c2a53810c4b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_orca_test01/result_2023-11-15 22:28:14.json b/HanaGroup/Mini_orca_test01/result_2023-11-15 22:28:14.json new file mode 100644 index 0000000000000000000000000000000000000000..5dcfd30a72d2548e72759c2d7fbb0ef55d3b9331 --- /dev/null +++ b/HanaGroup/Mini_orca_test01/result_2023-11-15 22:28:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39078498293515357, + "acc_stderr": 0.014258563880513782, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580123 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39314877514439356, + "acc_stderr": 0.004874511466836798, + "acc_norm": 0.50318661621191, + "acc_norm_stderr": 0.004989680072717476 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.017784034534992457, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.017784034534992457 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370334, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370334 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3774193548387097, + "acc_stderr": 0.02757596072327824, + "acc_norm": 0.3774193548387097, + "acc_norm_stderr": 0.02757596072327824 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942652, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942652 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652459, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652459 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602841997, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602841997 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + 
"acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680804, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849725, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.019450768432505514, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.019450768432505514 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012372, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012372 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925296, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925296 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510144, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510144 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.01177398032938071, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.01177398032938071 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.4549121253328978, + "mc2_stderr": 0.015789028871035962 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4167650531286895, + "acc_stderr": 0.01695048914610883, + "acc_norm": 0.4899645808736718, + "acc_norm_stderr": 0.017186891286894043 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, 
+ "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_orca_test01", + "model_sha": "c5d64eb31a3158983e2f8567d90c51981a424cd6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_orca_test02/result_2023-11-17 15:50:00.json b/HanaGroup/Mini_orca_test02/result_2023-11-17 15:50:00.json new file mode 100644 index 0000000000000000000000000000000000000000..eea347f61f7ddf74cb22bf9db0c4864139991571 --- /dev/null +++ b/HanaGroup/Mini_orca_test02/result_2023-11-17 15:50:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35494880546075086, + "acc_stderr": 0.013983036904094092, + "acc_norm": 0.42235494880546076, + "acc_norm_stderr": 0.014434138713379988 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3633738299143597, + "acc_stderr": 0.004799882248494814, + "acc_norm": 0.45817566221868156, + "acc_norm_stderr": 0.00497229376497873 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128919, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128919 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4278416347381865, + "acc_stderr": 0.017692787927803735, + "acc_norm": 0.4278416347381865, + "acc_norm_stderr": 0.017692787927803735 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755292, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755292 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 
0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.02489047176993815, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.02489047176993815 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235897, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235897 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.02422996529842509, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.02422996529842509 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + 
"acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4653179190751445, + "acc_stderr": 0.026854257928258893, + "acc_norm": 0.4653179190751445, + "acc_norm_stderr": 0.026854257928258893 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569653, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569653 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.021410999753635918, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.021410999753635918 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377562, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377562 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.028452639985088006, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.028452639985088006 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.019333142020797056, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.019333142020797056 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053756, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.032847388576472056, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.032847388576472056 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.01431099954796146, + 
"acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.01431099954796146 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625166, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625166 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.03241920684693334, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.03241920684693334 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849645, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849645 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219364, + "mc2": 0.4409558641757515, + "mc2_stderr": 0.015583794269994873 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3317591499409681, + "acc_stderr": 0.01618798464215732, + "acc_norm": 0.3860684769775679, + "acc_norm_stderr": 0.016738130760321747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_orca_test02", + "model_sha": "ab2d8616b556f22995feac76cfae8e1d9537c1e5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_orca_test03/result_2023-11-17 23:30:15.json b/HanaGroup/Mini_orca_test03/result_2023-11-17 23:30:15.json new file mode 100644 index 0000000000000000000000000000000000000000..1a78c4393402b88e7f453d651c49e4c0d2f432c6 --- /dev/null +++ b/HanaGroup/Mini_orca_test03/result_2023-11-17 23:30:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3651877133105802, + "acc_stderr": 0.0140702655192688, + "acc_norm": 0.4189419795221843, + "acc_norm_stderr": 0.014418106953639011 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38697470623381797, + "acc_stderr": 0.0048606237334611275, + "acc_norm": 0.49830711013742285, + "acc_norm_stderr": 0.0049897528111734115 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.03815827365913235, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.03815827365913235 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.049486373240266356, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.049486373240266356 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.01781438523853443, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.01781438523853443 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120575, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120575 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + 
"acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0438986995680878, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0438986995680878 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.028071588901091852, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.028071588901091852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + 
"acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981747, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981747 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.019431775677037313, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.019431775677037313 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961462, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961462 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.02873932851398358, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.02873932851398358 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.032335327775334835, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.01197150729498278, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.01197150729498278 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133026, + "mc2": 0.47048205310170765, + "mc2_stderr": 0.01584867112784759 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4214876033057851, + "acc_stderr": 0.016977101932601518, + "acc_norm": 0.4722550177095632, + "acc_norm_stderr": 0.017163867979456016 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_orca_test03", + "model_sha": "0d24157a9820d50b64bf30683143e0805ab589ef", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_orca_test04/result_2023-11-18 00:02:47.json b/HanaGroup/Mini_orca_test04/result_2023-11-18 00:02:47.json new file mode 100644 index 0000000000000000000000000000000000000000..6ce14950862b1f9e07eef89528fd66aad221f2b3 --- /dev/null +++ b/HanaGroup/Mini_orca_test04/result_2023-11-18 00:02:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3651877133105802, + "acc_stderr": 0.0140702655192688, + "acc_norm": 0.4189419795221843, + "acc_norm_stderr": 0.014418106953639011 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38657637920732923, + "acc_stderr": 0.00485969956245146, + "acc_norm": 0.49810794662417845, + "acc_norm_stderr": 0.0049897456858204285 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.03815827365913235, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.03815827365913235 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.049486373240266356, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.049486373240266356 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.017818248603465554, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120575, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120575 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0438986995680878, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0438986995680878 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871937, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871937 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.42105263157894735, + "acc_stderr": 0.04017901275981747, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981747 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.01945076843250551, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.01945076843250551 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961462, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961462 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614193, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614193 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3246414602346806, + "acc_stderr": 0.01195908938853003, + "acc_norm": 0.3246414602346806, + "acc_norm_stderr": 0.01195908938853003 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133026, + "mc2": 0.4703996827708238, + "mc2_stderr": 0.01584673681912838 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4167650531286895, + "acc_stderr": 0.01695048914610882, + "acc_norm": 0.46871310507674147, + "acc_norm_stderr": 0.017156666859785466 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_orca_test04", + "model_sha": "3623bf497344d78bfb3745f33b1b6e72ff86191c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Herry443/LLaMA2-ko-7B-KNUT-v0.1/result_2023-11-13 06:58:41.json b/Herry443/LLaMA2-ko-7B-KNUT-v0.1/result_2023-11-13 06:58:41.json new file mode 100644 index 0000000000000000000000000000000000000000..89543b210291bdab2ce7c80f9df7e8bcbff12956 --- /dev/null +++ b/Herry443/LLaMA2-ko-7B-KNUT-v0.1/result_2023-11-13 06:58:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30119453924914674, + "acc_stderr": 0.013406741767847626, + "acc_norm": 0.3583617747440273, + "acc_norm_stderr": 0.014012883334859859 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3458474407488548, + "acc_stderr": 0.004746716805735756, + "acc_norm": 0.42939653455486954, + "acc_norm_stderr": 0.004939784311448984 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.03446296217088426, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.03446296217088426 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.29757343550446996, + "acc_stderr": 0.016349111912909418, + "acc_norm": 0.29757343550446996, + "acc_norm_stderr": 0.016349111912909418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.029771642712491227, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.029771642712491227 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.23318385650224216, + "acc_stderr": 0.02838039114709472, + "acc_norm": 0.23318385650224216, + "acc_norm_stderr": 0.02838039114709472 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728742, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728742 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438015, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438015 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.12745098039215685, + "acc_stderr": 0.03318224921942077, + "acc_norm": 0.12745098039215685, + "acc_norm_stderr": 0.03318224921942077 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341933, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.028510251512341933 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.29354838709677417, + "acc_stderr": 0.025906087021319295, + "acc_norm": 0.29354838709677417, + "acc_norm_stderr": 0.025906087021319295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.26495726495726496, + 
"acc_stderr": 0.028911208802749465, + "acc_norm": 0.26495726495726496, + "acc_norm_stderr": 0.028911208802749465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.026199808807561932, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.026199808807561932 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252089, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252089 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.03479185572599659, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.03479185572599659 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.29850746268656714, + "acc_stderr": 0.032357437893550424, + "acc_norm": 0.29850746268656714, + "acc_norm_stderr": 0.032357437893550424 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.031862098516411454, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.031862098516411454 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.021132859182754444, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.021132859182754444 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.03309615177059007, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.03309615177059007 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.02577311116963045, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.02577311116963045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.02977866303775295, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.02977866303775295 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.018175110510343585, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.018175110510343585 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + 
"acc_stderr": 0.02656892101545715, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.02656892101545715 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998905, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.0184334276494019, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.0184334276494019 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093936, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093936 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.023529242185193106, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.023529242185193106 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788163, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.02957160106575337, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.02957160106575337 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27640156453715775, + "acc_stderr": 0.01142215319455358, + "acc_norm": 0.27640156453715775, + "acc_norm_stderr": 0.01142215319455358 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501943, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501943 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22031823745410037, + "mc1_stderr": 0.014509045171487283, + "mc2": 0.3876715630562864, + "mc2_stderr": 0.014780799577275159 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2668240850059032, + "acc_stderr": 0.015206575684565892, + "acc_norm": 0.40731995277449823, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, 
+ "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Herry443/LLaMA2-ko-7B-KNUT-v0.1", + "model_sha": "823d2fece402a057d1a68be83c80985d57a37471", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Herry443/Mistral-7B-KNUT-v0.1/result_2023-10-26 05:52:58.json b/Herry443/Mistral-7B-KNUT-v0.1/result_2023-10-26 05:52:58.json new file mode 100644 index 0000000000000000000000000000000000000000..bb80f183bd26b849e9a2c3c4bb8753a6915a2219 --- /dev/null +++ b/Herry443/Mistral-7B-KNUT-v0.1/result_2023-10-26 05:52:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2687713310580205, + "acc_stderr": 0.012955065963710682, + "acc_norm": 0.3225255972696246, + "acc_norm_stderr": 0.013659980894277366 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.3323043218482374, + "acc_stderr": 0.004700767741735566, + "acc_norm": 0.4056960764787891, + "acc_norm_stderr": 0.004900227226433385 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.037999786443706066, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.037999786443706066 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501117, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501117 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.031489558297455304, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.031489558297455304 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632945, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632945 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134987, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134987 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462203, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.04010358942462203 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.028942004040998167, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.028942004040998167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28717948717948716, + "acc_stderr": 0.022939925418530616, + "acc_norm": 0.28717948717948716, + "acc_norm_stderr": 0.022939925418530616 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030049, + "acc_norm": 
0.32407407407407407, + "acc_norm_stderr": 0.04524596007030049 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.452991452991453, + "acc_stderr": 0.03261099873098619, + "acc_norm": 0.452991452991453, + "acc_norm_stderr": 0.03261099873098619 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.026055296901152922, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.026055296901152922 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524575, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524575 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.036030385453603854, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.036030385453603854 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39303482587064675, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.39303482587064675, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554858, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554858 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3092485549132948, + "acc_stderr": 0.02488314057007176, + "acc_norm": 0.3092485549132948, + "acc_norm_stderr": 0.02488314057007176 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.026041766202717163, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.026041766202717163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 
0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26788990825688075, + "acc_stderr": 0.018987462257978652, + "acc_norm": 0.26788990825688075, + "acc_norm_stderr": 0.018987462257978652 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.026415601914389002, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.026415601914389002 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04391326286724071, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04391326286724071 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.03554180368025689, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.03554180368025689 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.01852175621542303, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.01852175621542303 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101373, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101373 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18981481481481483, + "acc_stderr": 0.026744714834691943, + "acc_norm": 0.18981481481481483, + "acc_norm_stderr": 0.026744714834691943 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010083, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010083 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.026040662474201278, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.026040662474201278 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960244, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960244 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35443037974683544, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.35443037974683544, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28292046936114734, + "acc_stderr": 0.011503891323188976, + "acc_norm": 0.28292046936114734, + "acc_norm_stderr": 0.011503891323188976 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.032282103870378935, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.032282103870378935 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 
0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.01541524174023704, + "mc2": 0.4418547715713716, + "mc2_stderr": 0.01568020575059561 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3010625737898465, + "acc_stderr": 0.015771113299945454, + "acc_norm": 0.448642266824085, + "acc_norm_stderr": 0.017099430514725792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Herry443/Mistral-7B-KNUT-v0.1", + "model_sha": "b90832d18d355d77c2e25181f59075070d946978", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Herry443/Mistral-7B-KNUT-v0.2/result_2023-11-28 02:55:57.json 
b/Herry443/Mistral-7B-KNUT-v0.2/result_2023-11-28 02:55:57.json new file mode 100644 index 0000000000000000000000000000000000000000..bece12d4a9ec556eca09300e60046bf485f761cd --- /dev/null +++ b/Herry443/Mistral-7B-KNUT-v0.2/result_2023-11-28 02:55:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26109215017064846, + "acc_stderr": 0.012835523909473847, + "acc_norm": 0.30204778156996587, + "acc_norm_stderr": 0.013417519144716417 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3200557657837084, + "acc_stderr": 0.0046554427665994646, + "acc_norm": 0.38856801433977295, + "acc_norm_stderr": 0.004864286176731832 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.03599335771456027, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.03599335771456027 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.04931801994220414, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.04931801994220414 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3486590038314176, + "acc_stderr": 0.01704124314349093, + "acc_norm": 0.3486590038314176, + "acc_norm_stderr": 0.01704124314349093 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785138, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785138 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.34726688102893893, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.34726688102893893, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291964, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291964 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41414141414141414, + "acc_stderr": 0.03509438348879628, + "acc_norm": 0.41414141414141414, + "acc_norm_stderr": 0.03509438348879628 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052452, + 
"acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052452 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.04453197507374983, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.04453197507374983 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.02770935967503249, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.02770935967503249 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5085470085470085, + "acc_stderr": 0.0327513030009703, + "acc_norm": 0.5085470085470085, + "acc_norm_stderr": 0.0327513030009703 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302506, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302506 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39800995024875624, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.39800995024875624, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.0492365963917331, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0492365963917331 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.38439306358381503, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.38439306358381503, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32098765432098764, + "acc_stderr": 0.025976566010862737, + "acc_norm": 0.32098765432098764, + "acc_norm_stderr": 
0.025976566010862737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41100917431192663, + "acc_stderr": 0.021095050687277638, + "acc_norm": 0.41100917431192663, + "acc_norm_stderr": 0.021095050687277638 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.038607315993160904, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.038607315993160904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.01821726955205342, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.01821726955205342 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432403, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997865, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997865 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.01455155365936992, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.01455155365936992 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.03106721126287249, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.03106721126287249 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.38396624472573837, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.38396624472573837, + "acc_norm_stderr": 
0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2522816166883963, + "acc_stderr": 0.011092789056875248, + "acc_norm": 0.2522816166883963, + "acc_norm_stderr": 0.011092789056875248 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.03198001660115072, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.03198001660115072 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624337, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624337 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299948, + "mc2": 0.42165436242518467, + "mc2_stderr": 0.015410741976473186 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2845336481700118, + "acc_stderr": 0.015512301654971767, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191392 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Herry443/Mistral-7B-KNUT-v0.2", + "model_sha": "ba21d9b13304dcef6c9d0f0f24d2e7893d569a5c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Herry443/Mistral-7B-KNUT-v0.3/result_2023-12-09 08:52:37.json b/Herry443/Mistral-7B-KNUT-v0.3/result_2023-12-09 08:52:37.json new file mode 100644 index 0000000000000000000000000000000000000000..3f2d446a9be02ff09b142825d2e8fb64b2c745e8 --- /dev/null +++ b/Herry443/Mistral-7B-KNUT-v0.3/result_2023-12-09 08:52:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2738907849829352, + "acc_stderr": 0.013032004972989503, + "acc_norm": 0.3054607508532423, + "acc_norm_stderr": 0.013460080478002505 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3260306711810396, + "acc_stderr": 0.004678006403691725, + "acc_norm": 0.40021907986456884, + "acc_norm_stderr": 0.004889413126208774 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3282247765006386, + "acc_stderr": 0.01679168564019289, + "acc_norm": 0.3282247765006386, + "acc_norm_stderr": 0.01679168564019289 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648026, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648026 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.03383201223244442, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.03383201223244442 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.03941707632064889, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 
0.03941707632064889 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.0234546748894043, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.0234546748894043 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5170940170940171, + "acc_stderr": 0.032736940493481824, + "acc_norm": 0.5170940170940171, + "acc_norm_stderr": 0.032736940493481824 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443867, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443867 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.044942908662520875, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.044942908662520875 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.02708037281514566, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.02708037281514566 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48258706467661694, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.48258706467661694, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.03496101481191181, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.03496101481191181 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02256989707491841, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491841 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554859, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554859 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.3179190751445087, + "acc_stderr": 0.025070713719153172, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153172 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.037311335196738925, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.037311335196738925 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02622964917882116, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02622964917882116 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.034107802518361846, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.034107802518361846 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3211009174311927, + "acc_stderr": 0.020018149772733747, + "acc_norm": 0.3211009174311927, + "acc_norm_stderr": 0.020018149772733747 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.041634530313028585, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.041634530313028585 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.027732834353363947, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.027732834353363947 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.018607552131279834, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.018607552131279834 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460994, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467763, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467763 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.33088235294117646, + "acc_stderr": 0.02858270975389844, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.02858270975389844 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.02916273841024978, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.02916273841024978 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4177215189873418, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.4177215189873418, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26401564537157757, + "acc_stderr": 0.011258435537723816, + "acc_norm": 0.26401564537157757, + "acc_norm_stderr": 0.011258435537723816 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156467, + "mc2": 0.4346601144729828, + "mc2_stderr": 0.015485642516678326 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2727272727272727, + "acc_stderr": 0.01531185311030035, + "acc_norm": 0.34946871310507677, + "acc_norm_stderr": 0.016392797085769843 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Herry443/Mistral-7B-KNUT-v0.3", + "model_sha": "089a962c7ef124af537742bd25034c601f264fae", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Herry443/Mistral-7B-KNUT-v0.4/result_2023-12-19 10:08:44.json b/Herry443/Mistral-7B-KNUT-v0.4/result_2023-12-19 10:08:44.json new file mode 100644 index 0000000000000000000000000000000000000000..f8a1b48c47ef817513f8c892ac03287828e243d9 --- /dev/null +++ b/Herry443/Mistral-7B-KNUT-v0.4/result_2023-12-19 10:08:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26791808873720135, + "acc_stderr": 0.012942030195136428, + "acc_norm": 0.31313993174061433, + "acc_norm_stderr": 0.013552671543623496 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32732523401712804, + "acc_stderr": 0.00468278079050834, + "acc_norm": 0.40380402310296754, + "acc_norm_stderr": 0.004896563126116813 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3116219667943806, + "acc_stderr": 0.016562433867284176, + "acc_norm": 0.3116219667943806, + "acc_norm_stderr": 0.016562433867284176 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3504823151125402, + "acc_stderr": 0.02709865262130175, + "acc_norm": 0.3504823151125402, + "acc_norm_stderr": 0.02709865262130175 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.041184385658062976 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.03996629574876718, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.03996629574876718 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3319327731092437, + "acc_stderr": 0.030588697013783663, + "acc_norm": 0.3319327731092437, + "acc_norm_stderr": 0.030588697013783663 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30256410256410254, + "acc_stderr": 0.02329088805377272, + "acc_norm": 0.30256410256410254, + "acc_norm_stderr": 0.02329088805377272 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114475, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114475 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.027273890594300642, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.027273890594300642 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5341880341880342, + "acc_stderr": 0.03267942734081227, + "acc_norm": 0.5341880341880342, + "acc_norm_stderr": 0.03267942734081227 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3132075471698113, + "acc_stderr": 0.028544793319055333, + "acc_norm": 0.3132075471698113, + "acc_norm_stderr": 0.028544793319055333 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04265792110940588, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940588 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114993, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114993 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39800995024875624, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.39800995024875624, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594316, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643895, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643895 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.025305258131879723, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.025305258131879723 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924055, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924055 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.024748624490537375, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.024748624490537375 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3100917431192661, + "acc_stderr": 0.019830849684439756, + "acc_norm": 0.3100917431192661, + "acc_norm_stderr": 0.019830849684439756 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.027245613047215355, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.027245613047215355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.035541803680256896, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.035541803680256896 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.01834252984527591, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.01834252984527591 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631296, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631296 + }, + "harness|ko_mmlu_moral_scenarios|5": { + 
"acc": 0.25027932960893856, + "acc_stderr": 0.014487500852850409, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850409 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3014705882352941, + "acc_stderr": 0.027875982114273168, + "acc_norm": 0.3014705882352941, + "acc_norm_stderr": 0.027875982114273168 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3020408163265306, + "acc_stderr": 0.029393609319879815, + "acc_norm": 0.3020408163265306, + "acc_norm_stderr": 0.029393609319879815 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.379746835443038, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.379746835443038, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2646675358539765, + "acc_stderr": 0.011267332992845531, + "acc_norm": 0.2646675358539765, + "acc_norm_stderr": 0.011267332992845531 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283349, + "mc2": 0.42277041139901306, + "mc2_stderr": 0.015451140013408284 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605975, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.01645549600031454 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Herry443/Mistral-7B-KNUT-v0.4", + "model_sha": "ed7abbc15e628a6832b00b24aad888e015e2a65b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HuggingFaceH4/zephyr-7b-beta/result_2023-11-01 04:21:47.json b/HuggingFaceH4/zephyr-7b-beta/result_2023-11-01 04:21:47.json new file mode 100644 index 0000000000000000000000000000000000000000..939b5a31e8c4805c413e7161e3c8d251331d34ad --- /dev/null +++ b/HuggingFaceH4/zephyr-7b-beta/result_2023-11-01 04:21:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33532423208191126, + "acc_stderr": 0.01379618294778556, + "acc_norm": 0.3848122866894198, + "acc_norm_stderr": 0.014218371065251112 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35480979884485164, + "acc_stderr": 0.004774778180345192, + "acc_norm": 0.44911372236606256, + "acc_norm_stderr": 0.00496387293685794 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41762452107279696, + "acc_stderr": 0.017635637326951534, + "acc_norm": 0.41762452107279696, + "acc_norm_stderr": 0.017635637326951534 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.029896145682095462, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.029896145682095462 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.4758842443729904, + "acc_stderr": 0.028365041542564584, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564584 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168284, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168284 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296546, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296546 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536821, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536821 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413865, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413865 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, 
+ "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342658, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342658 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.026882643434022885, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.026882643434022885 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379424, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379424 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47155963302752296, + "acc_stderr": 0.02140261569734804, + "acc_norm": 0.47155963302752296, + "acc_norm_stderr": 0.02140261569734804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127152, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127152 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259297, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259297 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.018975427920507215, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.018975427920507215 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + 
"acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.329608938547486, + "acc_stderr": 0.015721531075183884, + "acc_norm": 0.329608938547486, + "acc_norm_stderr": 0.015721531075183884 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4810126582278481, + "acc_stderr": 0.03252375148090448, + "acc_norm": 0.4810126582278481, + "acc_norm_stderr": 0.03252375148090448 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29791395045632335, + "acc_stderr": 0.011680717340400059, + "acc_norm": 0.29791395045632335, + "acc_norm_stderr": 0.011680717340400059 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.03198001660115072, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.03198001660115072 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03588624800091707, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091707 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3317013463892289, + "mc1_stderr": 0.01648214881024147, + "mc2": 0.5171680571717291, + "mc2_stderr": 0.01606077987901482 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39787485242030696, + "acc_stderr": 0.01682795905473339, + "acc_norm": 0.4014167650531287, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HuggingFaceH4/zephyr-7b-beta", + "model_sha": "3bac358730f8806e5c3dc7c7e19eb36e045bf720", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/COKAL-DPO-13b-v2/result_2023-11-11 08:29:02.json b/HumanF-MarkrAI/COKAL-DPO-13b-v2/result_2023-11-11 08:29:02.json new file mode 100644 index 0000000000000000000000000000000000000000..4b59dbe1b1b7d5a3a16f7f8fdf10c5d11464019e --- /dev/null +++ b/HumanF-MarkrAI/COKAL-DPO-13b-v2/result_2023-11-11 08:29:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5059726962457338, + "acc_stderr": 0.014610348300255793, + "acc_norm": 0.5494880546075085, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4880501892053376, + "acc_stderr": 0.00498835614649901, + "acc_norm": 0.6301533559051982, + "acc_norm_stderr": 0.00481776358141023 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 
0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.038194861407583984, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.038194861407583984 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016336, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016336 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028337, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028337 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235907, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235907 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.02708037281514566, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.02708037281514566 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.037038511930995215, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.037038511930995215 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972613, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842424, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.020920058346111076, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.020920058346111076 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.0282451340243873, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.0282451340243873 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, 
+ "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.01442229220480885, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.01442229220480885 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35853976531942633, + "acc_stderr": 0.012248487319682746, + "acc_norm": 0.35853976531942633, + "acc_norm_stderr": 0.012248487319682746 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3659730722154223, + "mc1_stderr": 0.01686294168408836, + "mc2": 0.5166857407308614, + "mc2_stderr": 0.01622317540419704 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4675324675324675, + "acc_stderr": 0.017154073716682865, + "acc_norm": 0.4982290436835891, + "acc_norm_stderr": 0.017190246276231863 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/COKAL-DPO-13b-v2", + "model_sha": "f90b0c3f6f91a58616aef3a19bdd1dc3c242028a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/COKAL-DPO-13b-v3/result_2023-11-26 12:58:31.json b/HumanF-MarkrAI/COKAL-DPO-13b-v3/result_2023-11-26 12:58:31.json new file mode 100644 index 0000000000000000000000000000000000000000..e407497267259c7b3f1ffed3a5b7cbb743e75334 --- /dev/null +++ b/HumanF-MarkrAI/COKAL-DPO-13b-v3/result_2023-11-26 12:58:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47696245733788395, + "acc_stderr": 0.014595873205358269, + "acc_norm": 0.5324232081911263, + "acc_norm_stderr": 0.014580637569995426 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46773551085441145, + "acc_stderr": 0.004979381876712608, + "acc_norm": 0.6227843059151563, + "acc_norm_stderr": 0.004836990373261561 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.5632183908045977, + "acc_stderr": 0.017736470837800694, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.017736470837800694 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056126, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056126 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836925, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836925 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.031342504862454025, + "acc_norm": 
0.6452991452991453, + "acc_norm_stderr": 0.031342504862454025 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112147, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112147 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518754, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518754 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6055045871559633, + "acc_stderr": 0.020954642108587506, + "acc_norm": 0.6055045871559633, + "acc_norm_stderr": 0.020954642108587506 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + 
"acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.01989841271763589, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.01989841271763589 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882618, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882618 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131117, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131117 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37222946544980445, + "acc_stderr": 0.012346241297204366, + "acc_norm": 0.37222946544980445, + "acc_norm_stderr": 0.012346241297204366 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.01645126444006824, + "mc2": 0.4865420269226251, + "mc2_stderr": 0.016014497778680654 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4675324675324675, + "acc_stderr": 0.01715407371668286, + "acc_norm": 0.5029515938606848, + "acc_norm_stderr": 0.017190054580194694 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/COKAL-DPO-13b-v3", + "model_sha": "64a95028cd730b0453dba44259b776a455f86049", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/Dear_My_best_Friends-v4-13B/result_2023-11-28 17:54:12.json b/HumanF-MarkrAI/Dear_My_best_Friends-v4-13B/result_2023-11-28 17:54:12.json new file mode 100644 index 0000000000000000000000000000000000000000..00ea1e398f5a18abe027913ab25e0c29cc2bea5c --- /dev/null +++ b/HumanF-MarkrAI/Dear_My_best_Friends-v4-13B/result_2023-11-28 17:54:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4206484641638225, + "acc_stderr": 0.014426211252508406, + "acc_norm": 0.4786689419795222, + "acc_norm_stderr": 0.014598087973127104 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43108942441744674, + "acc_stderr": 
0.0049421645859914695, + "acc_norm": 0.5748854809798845, + "acc_norm_stderr": 0.004933500261683597 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5593869731800766, + "acc_stderr": 0.017753396973908493, + "acc_norm": 0.5593869731800766, + "acc_norm_stderr": 0.017753396973908493 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232964, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232964 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413866, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413866 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833935, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833935 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6311926605504588, + "acc_stderr": 0.02068622756072953, + "acc_norm": 0.6311926605504588, + "acc_norm_stderr": 0.02068622756072953 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.019542101564854114, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.019542101564854114 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.031137304297185805, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.031137304297185805 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3539765319426336, + "acc_stderr": 0.012213504731731637, + "acc_norm": 0.3539765319426336, + "acc_norm_stderr": 0.012213504731731637 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 
0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882462, + "mc2": 0.4658908168793715, + "mc2_stderr": 0.01536090399308638 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46871310507674147, + "acc_stderr": 0.017156666859785466, + "acc_norm": 0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/Dear_My_best_Friends-v4-13B", + "model_sha": "9939860a1167f1fdb90b3a206eadf07e8873c7e6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/Yi_lee-SFT-v2-6B/result_2023-12-12 07:30:13.json b/HumanF-MarkrAI/Yi_lee-SFT-v2-6B/result_2023-12-12 07:30:13.json 
new file mode 100644 index 0000000000000000000000000000000000000000..8e24b5619b65879228e773d9b8828ac4625cc4be --- /dev/null +++ b/HumanF-MarkrAI/Yi_lee-SFT-v2-6B/result_2023-12-12 07:30:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3506825938566553, + "acc_stderr": 0.01394463593072609, + "acc_norm": 0.40784982935153585, + "acc_norm_stderr": 0.014361097288449696 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3963353913563035, + "acc_stderr": 0.004881359589148996, + "acc_norm": 0.5270862378012349, + "acc_norm_stderr": 0.004982454383162067 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.0177478742456836, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.0177478742456836 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.0378913442461155, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.0378913442461155 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.02837327096106942, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.02837327096106942 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.034648816750163375, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.034648816750163375 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.0253106392549339, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.0253106392549339 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674078, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149145, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149145 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + 
"acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6238532110091743, + "acc_stderr": 0.020769231968205074, + "acc_norm": 0.6238532110091743, + "acc_norm_stderr": 0.020769231968205074 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259293, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.0399930971277747, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.0399930971277747 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.0278079901413202, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.0278079901413202 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025425, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025425 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.01450897945355398, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.01450897945355398 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411952, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411952 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, 
+ "acc_stderr": 0.011977676704716, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704716 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4453783861858108, + "mc2_stderr": 0.015094573783194452 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5312868949232585, + "acc_stderr": 0.017156666859785473, + "acc_norm": 0.58913813459268, + "acc_norm_stderr": 0.016914972767841045 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + 
}, + "config_general": { + "model_name": "HumanF-MarkrAI/Yi_lee-SFT-v2-6B", + "model_sha": "17959d8351fad03a56f0d8f4607ebe23ae764f34", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/mistralopithecus-v3-dpo-7b/result_2023-11-26 11:42:24.json b/HumanF-MarkrAI/mistralopithecus-v3-dpo-7b/result_2023-11-26 11:42:24.json new file mode 100644 index 0000000000000000000000000000000000000000..ab7f212410ed867c64fdefd48386ac837df27485 --- /dev/null +++ b/HumanF-MarkrAI/mistralopithecus-v3-dpo-7b/result_2023-11-26 11:42:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46331058020477817, + "acc_stderr": 0.01457200052775699, + "acc_norm": 0.507679180887372, + "acc_norm_stderr": 0.014609667440892574 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4320852419836686, + "acc_stderr": 0.00494353724234442, + "acc_norm": 0.5420235012945628, + "acc_norm_stderr": 0.00497212652303194 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.017879948914431665, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.017879948914431665 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 
0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177495, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165897, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165897 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972742, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972742 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637793, + "acc_norm": 
0.4884393063583815, + "acc_norm_stderr": 0.02691189868637793 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142635, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142635 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.045190820213197716, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.045190820213197716 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.01939305840235545, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.01939305840235545 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534785, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534785 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000534, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2837988826815642, + "acc_stderr": 0.015078358970751772, + "acc_norm": 0.2837988826815642, + "acc_norm_stderr": 0.015078358970751772 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.02833295951403122, + "acc_norm": 
0.31985294117647056, + "acc_norm_stderr": 0.02833295951403122 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823063004, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823063004 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674119, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674119 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253595, + "mc2": 0.4169766746281562, + "mc2_stderr": 0.016347162773038867 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31759149940968123, + "acc_stderr": 0.01600558187622931, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.01627295299701912 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/mistralopithecus-v3-dpo-7b", + "model_sha": "d7759639c8b879a011233f9ca5af1481b844e22f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13B-v3/result_2023-10-24 18:02:37.json b/HumanF-MarkrAI/pub-llama-13B-v3/result_2023-10-24 18:02:37.json new file mode 100644 index 0000000000000000000000000000000000000000..0f22bb83c01d882f93df58c6f128e3b2ddb4c587 --- /dev/null +++ b/HumanF-MarkrAI/pub-llama-13B-v3/result_2023-10-24 18:02:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36945392491467577, + "acc_stderr": 0.014104578366491888, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.014430197069326028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40450109539932283, + "acc_stderr": 0.004897921845492105, + "acc_norm": 0.5392352121091416, + "acc_norm_stderr": 0.004974395131539592 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5070242656449553, + "acc_stderr": 0.017878199003432214, + "acc_norm": 0.5070242656449553, + "acc_norm_stderr": 0.017878199003432214 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.037400593820293204, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.037400593820293204 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + 
"acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.034051553805619514, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.034051553805619514 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502744, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502744 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066475, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066475 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.035161847729521675, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.035161847729521675 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02487081525105709, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02487081525105709 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + 
"acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0383515395439942, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0383515395439942 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881695, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.02866199620233531, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.02866199620233531 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3559322033898305, + "acc_stderr": 0.012228645537277573, + "acc_norm": 0.3559322033898305, + "acc_norm_stderr": 0.012228645537277573 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431855, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431855 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775527, + "mc2": 0.4355517094226067, + "mc2_stderr": 0.015309009273280678 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.017107618859549346, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/pub-llama-13B-v3", + "model_sha": "a077b211925e00e7bd8e3f6bdf29476c59b81d6d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13B-v4/result_2023-11-03 03:39:52.json b/HumanF-MarkrAI/pub-llama-13B-v4/result_2023-11-03 03:39:52.json new file mode 100644 index 0000000000000000000000000000000000000000..fa83d02f099d109b665d07119f4dbbaaa4927e8d --- /dev/null +++ b/HumanF-MarkrAI/pub-llama-13B-v4/result_2023-11-03 03:39:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39590443686006827, + "acc_stderr": 0.014291228393536588, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.420035849432384, + "acc_stderr": 0.004925556104679414, + "acc_norm": 0.5587532364070902, + "acc_norm_stderr": 0.004955212787832385 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 0.01784491809046855, + "acc_norm": 0.5312899106002554, + "acc_norm_stderr": 0.01784491809046855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029321, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029321 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484068, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 
0.02834504586484068 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540632, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540632 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 
0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.026817718130348923, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.026817718130348923 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353927, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353927 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5394495412844037, + "acc_stderr": 0.0213704946099951, + "acc_norm": 0.5394495412844037, + "acc_norm_stderr": 0.0213704946099951 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02782610930728369, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02782610930728369 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215927, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215927 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022135 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569746, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.011983819806464752, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464752 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31211750305997554, + "mc1_stderr": 0.01622075676952091, + "mc2": 0.4796342874579499, + "mc2_stderr": 0.015443652481064269 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39669421487603307, + "acc_stderr": 0.016819438642971408, + "acc_norm": 0.42266824085005905, + "acc_norm_stderr": 0.0169835060795776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/pub-llama-13B-v4", + "model_sha": "8f327f994717ac7f2959674cc066cc11434626b1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13B-v5/result_2023-11-02 17:54:22.json b/HumanF-MarkrAI/pub-llama-13B-v5/result_2023-11-02 17:54:22.json new file mode 100644 index 0000000000000000000000000000000000000000..3ef8d945628dc86e524e95d31b86aafe4850110d --- /dev/null +++ b/HumanF-MarkrAI/pub-llama-13B-v5/result_2023-11-02 17:54:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4061433447098976, + "acc_stderr": 0.014351656690097858, + "acc_norm": 0.46757679180887374, + "acc_norm_stderr": 0.014580637569995421 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4268074088826927, + "acc_stderr": 0.004936029827672038, + "acc_norm": 0.5713005377414858, + "acc_norm_stderr": 0.004938787067611805 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5542784163473818, + "acc_stderr": 0.017774297282479506, + "acc_norm": 0.5542784163473818, + "acc_norm_stderr": 0.017774297282479506 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933917, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933917 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959316, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959316 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.02799672318063146, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.02799672318063146 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 
0.040179012759817494, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.01972205893961807, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.01972205893961807 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.030290619180485687, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.030290619180485687 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34876140808344197, + "acc_stderr": 0.012172035157127113, + "acc_norm": 0.34876140808344197, + "acc_norm_stderr": 0.012172035157127113 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834559, + "mc2": 0.4231914841328799, + "mc2_stderr": 0.014828818370873126 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4946871310507674, + "acc_stderr": 0.01718938362722969, + "acc_norm": 0.5667060212514758, + "acc_norm_stderr": 0.017036683641893098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/pub-llama-13B-v5", + "model_sha": "1f872cab411ce3259a7fc23816b8bce1ca67f4b7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13B-v6/result_2023-11-04 16:02:32.json b/HumanF-MarkrAI/pub-llama-13B-v6/result_2023-11-04 16:02:32.json new file mode 100644 index 0000000000000000000000000000000000000000..de4ed80cacebe91ae87a1b0efb30f4266dea61a0 --- /dev/null +++ b/HumanF-MarkrAI/pub-llama-13B-v6/result_2023-11-04 16:02:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4104095563139932, + "acc_stderr": 0.014374922192642662, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007105 + }, + "harness|ko_hellaswag|10": { + "acc": 0.427504481179048, + "acc_stderr": 0.004937054233711568, + "acc_norm": 0.5733917546305517, + "acc_norm_stderr": 0.004935735300348869 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.017769250583533246, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.017769250583533246 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419034, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419034 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.02524277098712617, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.02524277098712617 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073824, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073824 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119994, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119994 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939101, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6220183486238532, + "acc_stderr": 0.02078918706672812, + "acc_norm": 0.6220183486238532, + "acc_norm_stderr": 0.02078918706672812 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141114, + "acc_norm": 
0.42483660130718953, + "acc_norm_stderr": 0.028304576673141114 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477752, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477752 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.037709700493470166, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.037709700493470166 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976698, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976698 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3644067796610169, + "acc_stderr": 0.012291694983056474, + "acc_norm": 0.3644067796610169, + "acc_norm_stderr": 0.012291694983056474 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31211750305997554, + "mc1_stderr": 0.01622075676952091, + "mc2": 0.4713446696460188, + "mc2_stderr": 0.01534716845498683 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.5667060212514758, + "acc_norm_stderr": 0.017036683641893098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/pub-llama-13B-v6", + "model_sha": "af533b0f41590d5d8c5ced917b19a82ac98bb201", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13b-v1/result_2023-10-19 18:44:30.json b/HumanF-MarkrAI/pub-llama-13b-v1/result_2023-10-19 18:44:30.json new file mode 100644 index 0000000000000000000000000000000000000000..0ebf33ec25a761caec265aaec21f178de036c605 --- /dev/null +++ b/HumanF-MarkrAI/pub-llama-13b-v1/result_2023-10-19 18:44:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3677474402730375, + "acc_stderr": 0.01409099561816849, + "acc_norm": 0.41552901023890787, + "acc_norm_stderr": 0.01440136664121639 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40579565823541125, + "acc_stderr": 
0.004900417982582061, + "acc_norm": 0.5321649073889664, + "acc_norm_stderr": 0.004979446038824757 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5006385696040868, + "acc_stderr": 0.01787994891443168, + "acc_norm": 0.5006385696040868, + "acc_norm_stderr": 0.01787994891443168 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357787, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357787 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.02839442137098453, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.02839442137098453 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990028, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990028 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102318, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102318 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 
0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998576, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998576 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.02829205683011273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066482, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.026756255129663765, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.026756255129663765 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583639, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 
0.03775205013583639 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5174311926605505, + "acc_stderr": 0.02142429187185315, + "acc_norm": 0.5174311926605505, + "acc_norm_stderr": 0.02142429187185315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.019431775677037313, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.019431775677037313 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03324708911809117, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.03324708911809117 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.01446589382985992, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.01446589382985992 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3624511082138201, + "acc_stderr": 0.012277512533252495, + "acc_norm": 0.3624511082138201, + "acc_norm_stderr": 0.012277512533252495 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 
0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.4364091486561351, + "mc2_stderr": 0.015369734802451228 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3919716646989374, + "acc_stderr": 0.016784332119424077, + "acc_norm": 0.43565525383707204, + "acc_norm_stderr": 0.017047415229476334 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/pub-llama-13b-v1", + "model_sha": "4aa21e41dfcb82ff842306b3b5eadd2b258bfc80", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13b-v2/result_2023-10-22 16:02:46.json b/HumanF-MarkrAI/pub-llama-13b-v2/result_2023-10-22 16:02:46.json new file 
mode 100644 index 0000000000000000000000000000000000000000..7d6e7f824011eef0be37e0eb0a9eb0c103636d19 --- /dev/null +++ b/HumanF-MarkrAI/pub-llama-13b-v2/result_2023-10-22 16:02:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142824, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303026 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4048994224258116, + "acc_stderr": 0.004898693652043317, + "acc_norm": 0.5401314479187412, + "acc_norm_stderr": 0.0049736830262021746 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5019157088122606, + "acc_stderr": 0.017879832259026677, + "acc_norm": 0.5019157088122606, + "acc_norm_stderr": 0.017879832259026677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231008, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231008 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.02506909438729654, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.02506909438729654 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536821, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536821 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906234, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906234 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.035161847729521675, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.035161847729521675 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562424, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.02677299065336182, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.02677299065336182 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + 
"acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5376146788990825, + "acc_stderr": 0.021376575274397576, + "acc_norm": 0.5376146788990825, + "acc_norm_stderr": 0.021376575274397576 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03324708911809117, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.03324708911809117 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398865, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398865 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.35723598435462844, + "acc_stderr": 0.012238615750316506, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.012238615750316506 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608742, + "mc2": 0.43609767583849846, + "mc2_stderr": 0.015308496603243212 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4214876033057851, + "acc_stderr": 0.016977101932601518, + "acc_norm": 0.4757969303423849, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/pub-llama-13b-v2", + "model_sha": "d59387039c395781b62f514db7bf4fb32d254522", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Intel/neural-chat-7b-v3-1/result_2023-11-28 00:35:05.json b/Intel/neural-chat-7b-v3-1/result_2023-11-28 00:35:05.json new file mode 100644 index 0000000000000000000000000000000000000000..ee680139612ff97bebb80c8bce408247f12791ae --- /dev/null +++ b/Intel/neural-chat-7b-v3-1/result_2023-11-28 00:35:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.01388881628678211, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892896 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3745269866560446, + "acc_stderr": 0.004830113797327052, + "acc_norm": 0.47998406691894047, + "acc_norm_stderr": 0.00498578162046701 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468537, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468537 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197426, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 
0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.0336612448905145, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.0336612448905145 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347354, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347354 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924336, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924336 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.0344578996436275, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.0344578996436275 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.02680372058320619, + "acc_norm": 
0.45375722543352603, + "acc_norm_stderr": 0.02680372058320619 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.019469518221573695, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.019469518221573695 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.02847350127296376, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.02847350127296376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786171, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786171 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + 
"acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163909, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163909 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214938, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214938 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.037694303145125674, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.037694303145125674 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3708690330477356, + "mc1_stderr": 0.016909693580248807, + "mc2": 0.5496429922579386, + "mc2_stderr": 0.016045283495853307 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4167650531286895, + "acc_stderr": 0.016950489146108826, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.017057753702160283 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Intel/neural-chat-7b-v3-1", + "model_sha": "c70aa428800d151f4eae2b6d4b6a08c773868987", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JYKIM-AI/Mistral-7B-SFT-v0.1/result_2023-11-20 10:30:05.json b/JYKIM-AI/Mistral-7B-SFT-v0.1/result_2023-11-20 10:30:05.json new file mode 100644 index 0000000000000000000000000000000000000000..997dea0055a29b2c29387b316fbcd1a349512a5a --- /dev/null +++ b/JYKIM-AI/Mistral-7B-SFT-v0.1/result_2023-11-20 10:30:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25426621160409557, + "acc_stderr": 0.012724999945157741, + "acc_norm": 0.31143344709897613, + "acc_norm_stderr": 0.013532472099850944 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33100975901214896, + "acc_stderr": 0.00469614833957098, + "acc_norm": 0.40290778729336785, + "acc_norm_stderr": 0.004894801119898609 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691583, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.03424042924691583 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260597, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260597 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.32567049808429116, + "acc_stderr": 0.016757989458549675, + "acc_norm": 0.32567049808429116, + "acc_norm_stderr": 0.016757989458549675 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501117, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501117 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553026, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553026 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.026082700695399662, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.026082700695399662 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.0403931497872456, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.0403931497872456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, 
+ "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713545, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713545 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052455, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052455 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854932, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18226600985221675, + "acc_stderr": 0.02716334085964515, + "acc_norm": 0.18226600985221675, + "acc_norm_stderr": 0.02716334085964515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029258, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029258 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.02648035717989569, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.02648035717989569 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275798, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275798 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916718, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916718 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + 
"acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2398843930635838, + "acc_stderr": 0.022989592543123567, + "acc_norm": 0.2398843930635838, + "acc_norm_stderr": 0.022989592543123567 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868055, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868055 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836183, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.03410780251836183 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28623853211009176, + "acc_stderr": 0.019379436628919982, + "acc_norm": 0.28623853211009176, + "acc_norm_stderr": 0.019379436628919982 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818737, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818737 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537534, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537534 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307857, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307857 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010088, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010088 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2516297262059974, + "acc_stderr": 0.011083276280441904, + "acc_norm": 0.2516297262059974, + "acc_norm_stderr": 0.011083276280441904 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923393, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842892, + "mc2": 0.4372879851386349, + "mc2_stderr": 0.015410544751862798 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22904368358913813, + "acc_stderr": 0.01444737227725382, + "acc_norm": 0.2715466351829988, + "acc_norm_stderr": 0.015291071117310378 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JYKIM-AI/Mistral-7B-SFT-v0.1", + "model_sha": "d950efa7c559ee0ef5e785429f1e354386cfedf0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JYKIM-AI/Mistral-7B-SFT/result_2023-11-20 11:05:49.json b/JYKIM-AI/Mistral-7B-SFT/result_2023-11-20 11:05:49.json new file mode 100644 index 0000000000000000000000000000000000000000..cfa92074349f1267ef56b5ea7ac2d165dd0a17e2 --- /dev/null +++ b/JYKIM-AI/Mistral-7B-SFT/result_2023-11-20 11:05:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25426621160409557, + "acc_stderr": 0.012724999945157741, + "acc_norm": 0.31143344709897613, + "acc_norm_stderr": 0.013532472099850944 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3311093407687712, + "acc_stderr": 0.004696505101217403, + "acc_norm": 0.40290778729336785, + "acc_norm_stderr": 0.004894801119898608 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691583, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.03424042924691583 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260597, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260597 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.32567049808429116, + "acc_stderr": 0.016757989458549675, + "acc_norm": 0.32567049808429116, + "acc_norm_stderr": 0.016757989458549675 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501117, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501117 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553026, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553026 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.026082700695399662, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.026082700695399662 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.0403931497872456, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.0403931497872456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713545, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713545 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052455, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052455 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854932, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18226600985221675, + "acc_stderr": 0.02716334085964515, + "acc_norm": 0.18226600985221675, + "acc_norm_stderr": 0.02716334085964515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029258, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029258 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.02648035717989569, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.02648035717989569 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275798, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275798 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916718, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916718 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2398843930635838, + "acc_stderr": 0.022989592543123567, + "acc_norm": 0.2398843930635838, + "acc_norm_stderr": 0.022989592543123567 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868055, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868055 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836183, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.03410780251836183 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28623853211009176, + "acc_stderr": 0.019379436628919982, + "acc_norm": 0.28623853211009176, + "acc_norm_stderr": 0.019379436628919982 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818737, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818737 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537534, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537534 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307857, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307857 + }, + "harness|ko_mmlu_machine_learning|5": { + 
"acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010088, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010088 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2516297262059974, + "acc_stderr": 0.011083276280441904, + "acc_norm": 0.2516297262059974, + "acc_norm_stderr": 0.011083276280441904 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923393, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842892, + "mc2": 0.4372897231981029, + "mc2_stderr": 0.01541061408460767 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22904368358913813, + "acc_stderr": 0.01444737227725382, + "acc_norm": 0.2715466351829988, + "acc_norm_stderr": 0.015291071117310378 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JYKIM-AI/Mistral-7B-SFT", + "model_sha": "ea2e2395fcf295d293c236b0228ece970cca5aba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/Mistral-instruct-DPO-Y24-v2/result_2023-12-06 23:00:18.json b/Ja3ck/Mistral-instruct-DPO-Y24-v2/result_2023-12-06 23:00:18.json new file mode 100644 index 0000000000000000000000000000000000000000..0e7621bf13fad345ca0f7d6cb23c1ac60a3a950b --- /dev/null +++ b/Ja3ck/Mistral-instruct-DPO-Y24-v2/result_2023-12-06 23:00:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840053, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38697470623381797, + "acc_stderr": 0.004860623733461132, + "acc_norm": 0.5070703047201752, + "acc_norm_stderr": 0.004989282516055394 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49169859514687103, + "acc_stderr": 0.017877498991072008, + "acc_norm": 0.49169859514687103, + "acc_norm_stderr": 0.017877498991072008 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.028397944907806612, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.028397944907806612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841585, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841585 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.035107665979592154, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.035107665979592154 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + 
"acc_stderr": 0.02874204090394849, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02874204090394849 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699954, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699954 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.04062990784146667, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.04062990784146667 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.02687408588351835, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.02687408588351835 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594384, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594384 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 
0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762633, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762633 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997865, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997865 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254187, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254187 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3435462842242503, + "acc_stderr": 0.012128961174190161, + "acc_norm": 0.3435462842242503, + "acc_norm_stderr": 0.012128961174190161 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431855, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431855 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.46644130422803615, + "mc2_stderr": 0.015463171251968822 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41912632821723733, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.017090852631668332 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/Mistral-instruct-DPO-Y24-v2", + "model_sha": "15acc5b8edbcbfda168710c8764a4d13fc98fd05", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/Mistral-instruct-IPO-Y24-v1/result_2023-12-11 06:51:39.json b/Ja3ck/Mistral-instruct-IPO-Y24-v1/result_2023-12-11 06:51:39.json new file mode 100644 index 0000000000000000000000000000000000000000..0d8d05bf2d1d75e70fb99d4110b1b93980342812 --- /dev/null +++ b/Ja3ck/Mistral-instruct-IPO-Y24-v1/result_2023-12-11 06:51:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38310580204778155, + "acc_stderr": 0.014206472661672877, + "acc_norm": 0.4232081911262799, + "acc_norm_stderr": 0.014438036220848017 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39454291973710415, + "acc_stderr": 0.004877534215987089, + "acc_norm": 0.5108544114718183, + "acc_norm_stderr": 0.0049886054982739 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 
0.01787994891443167, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.01787994891443167 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232963, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232963 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.02530295889085015, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.02530295889085015 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + 
"acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094527, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094527 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.0250437573185202, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.0250437573185202 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668773, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668773 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 
0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521664, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521664 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.019886221037501862, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.019886221037501862 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.046355501356099754, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.046355501356099754 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2860335195530726, + "acc_stderr": 0.015113972129062136, + "acc_norm": 0.2860335195530726, + "acc_norm_stderr": 0.015113972129062136 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.03181425118197787, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.03181425118197787 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3468057366362451, + "acc_stderr": 0.012156071332318705, + "acc_norm": 0.3468057366362451, + "acc_norm_stderr": 0.012156071332318705 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.46200402478414904, + "mc2_stderr": 0.015516827306627103 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39433293978748524, + "acc_stderr": 0.016802090674893203, + "acc_norm": 0.4769775678866588, + "acc_norm_stderr": 0.017172121546727634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 
1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/Mistral-instruct-IPO-Y24-v1", + "model_sha": "322906ac8b7dd81de714569db3848eda97d5d40f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/Mistral-instruct-Y24-DPO/result_2023-11-28 01:13:47.json b/Ja3ck/Mistral-instruct-Y24-DPO/result_2023-11-28 01:13:47.json new file mode 100644 index 0000000000000000000000000000000000000000..b27a0dcc42c575ae97e04a4ad7d1d9d71433a936 --- /dev/null +++ b/Ja3ck/Mistral-instruct-Y24-DPO/result_2023-11-28 01:13:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3873720136518771, + "acc_stderr": 0.01423587248790987, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650647 + }, + "harness|ko_hellaswag|10": { + "acc": 0.392850029874527, + "acc_stderr": 0.0048738583238407945, + "acc_norm": 0.5120493925512846, + "acc_norm_stderr": 0.004988332289642083 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219295, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219295 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5057471264367817, + "acc_stderr": 0.017878782326129224, + "acc_norm": 0.5057471264367817, + "acc_norm_stderr": 0.017878782326129224 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 
0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.028422687404312107, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.028422687404312107 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.028447965476231022, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.028447965476231022 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137595, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137595 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489358, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489358 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, 
+ "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138296, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138296 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521664, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521664 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061177, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115882, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875192, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875192 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.033622774366080424, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.033622774366080424 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786173, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786173 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585895, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585895 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236614, + "mc2": 0.4623692353701492, + "mc2_stderr": 
0.015502597273810991 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3860684769775679, + "acc_stderr": 0.016738130760321743, + "acc_norm": 0.4722550177095632, + "acc_norm_stderr": 0.017163867979456012 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/Mistral-instruct-Y24-DPO", + "model_sha": "5cadddfbeeac1dd2be25ea036d8623968e987f3c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/Mistral-instruct-Y24-v5/result_2023-11-24 03:55:25.json b/Ja3ck/Mistral-instruct-Y24-v5/result_2023-11-24 03:55:25.json new file mode 100644 index 0000000000000000000000000000000000000000..2a53bc03e4bd24718b7022542c684c3bacdba510 --- /dev/null +++ b/Ja3ck/Mistral-instruct-Y24-v5/result_2023-11-24 
03:55:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650647 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3906592312288389, + "acc_stderr": 0.004869010152280755, + "acc_norm": 0.5073690499900418, + "acc_norm_stderr": 0.004989239462835229 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5044699872286079, + "acc_stderr": 0.01787924897058436, + "acc_norm": 0.5044699872286079, + "acc_norm_stderr": 0.01787924897058436 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840678, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840678 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.02530295889085015, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.02530295889085015 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956914 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.028422687404312107, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.028422687404312107 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137595, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137595 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400477, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400477 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + 
"acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.021414757058175502, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.021414757058175502 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.01991037746310594, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.01991037746310594 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251458, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.046355501356099754, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.046355501356099754 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.015024083883322884, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.015024083883322884 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329387, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329387 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.0318421386668758, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.0318421386668758 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3468057366362451, + "acc_stderr": 0.012156071332318705, + "acc_norm": 0.3468057366362451, + "acc_norm_stderr": 0.012156071332318705 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 
0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024637, + "mc2": 0.44256276494088104, + "mc2_stderr": 0.015431425162220794 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38488783943329397, + "acc_stderr": 0.016728579701498665, + "acc_norm": 0.4722550177095632, + "acc_norm_stderr": 0.01716386797945601 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/Mistral-instruct-Y24-v5", + "model_sha": "5d268f9f5c87c414661e40ffc464ae5686964586", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/Mistral-instruct-Y24-v6/result_2023-12-04 07:37:56.json b/Ja3ck/Mistral-instruct-Y24-v6/result_2023-12-04 07:37:56.json new file mode 100644 index 0000000000000000000000000000000000000000..99f666f940532ae19a5467126febc9baa32bcf98 --- /dev/null +++ b/Ja3ck/Mistral-instruct-Y24-v6/result_2023-12-04 07:37:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34726962457337884, + "acc_stderr": 0.013913034529620442, + "acc_norm": 0.39505119453924914, + "acc_norm_stderr": 0.014285898292938175 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38259310894244175, + "acc_stderr": 0.0048502689869033494, + "acc_norm": 0.49412467635929097, + "acc_norm_stderr": 0.004989436910754223 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370608, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370608 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49808429118773945, + "acc_stderr": 0.017879832259026677, + "acc_norm": 0.49808429118773945, + "acc_norm_stderr": 0.017879832259026677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 
0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.025158266016868554, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.025158266016868554 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02874204090394849, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02874204090394849 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119994, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119994 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699954, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699954 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.02684298551961537, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.02684298551961537 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 
+ }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.027786800931427453, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.027786800931427453 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.042059539338841226, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.042059539338841226 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477752, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477752 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.033723432716530624, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.033723432716530624 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476787, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476787 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254187, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254187 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.0318421386668758, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.0318421386668758 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214941, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214941 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237272, + "mc2": 0.4278199183026475, + "mc2_stderr": 0.015157555430007909 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42739079102715466, + "acc_stderr": 0.017008129844823156, + "acc_norm": 0.45218417945690675, + "acc_norm_stderr": 0.017111567130916785 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/Mistral-instruct-Y24-v6", + "model_sha": "8bb579459c344a1ea4abdc76b52532717ae456ce", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/llama-2-13b-DPO-Y24-v2/result_2023-11-29 06:49:31.json b/Ja3ck/llama-2-13b-DPO-Y24-v2/result_2023-11-29 06:49:31.json new file mode 100644 index 0000000000000000000000000000000000000000..5b39427551fb7d36ba9a4080f8e59e18e6431560 --- /dev/null +++ b/Ja3ck/llama-2-13b-DPO-Y24-v2/result_2023-11-29 06:49:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32764505119453924, + "acc_stderr": 0.013715847940719348, + "acc_norm": 0.3822525597269625, + "acc_norm_stderr": 0.014200454049979288 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36427006572395937, + "acc_stderr": 0.004802413919932662, + "acc_norm": 0.4647480581557459, + "acc_norm_stderr": 0.00497736436479559 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197598, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.03128217706368461, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.03128217706368461 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557835, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.030052580579557835 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.035333892347392454, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.035333892347392454 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.023135287974325628, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.023135287974325628 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679214, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679214 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3950617283950617, + "acc_stderr": 0.027201117666925657, + "acc_norm": 0.3950617283950617, + "acc_norm_stderr": 0.027201117666925657 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557673, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579858, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579858 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42385321100917434, + "acc_stderr": 0.021187263209087526, + "acc_norm": 0.42385321100917434, + "acc_norm_stderr": 0.021187263209087526 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423545, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423545 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092487, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092487 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02988691054762697, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02988691054762697 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398866, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398866 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030802, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030802 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45147679324894513, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.45147679324894513, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698609, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698609 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833343, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833343 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398395, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398395 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715004, + "mc2": 0.41094521391654454, + "mc2_stderr": 0.015180355971022358 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42857142857142855, + "acc_stderr": 0.01701403811929748, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.017189767032130817 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/llama-2-13b-DPO-Y24-v2", + "model_sha": "2b9ef358d9d1cfdb22de7d0d865782bfd45f8ab9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/llama-2-13b-instruct-Y24-v1/result_2023-11-29 06:12:47.json b/Ja3ck/llama-2-13b-instruct-Y24-v1/result_2023-11-29 06:12:47.json new file mode 100644 index 0000000000000000000000000000000000000000..1a2faf4ad55ffe14c87dbe212ecb75a30c4ed54e --- /dev/null +++ b/Ja3ck/llama-2-13b-instruct-Y24-v1/result_2023-11-29 06:12:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3267918088737201, + "acc_stderr": 0.013706665975587336, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668526 + }, + "harness|ko_hellaswag|10": { + "acc": 0.364070902210715, + "acc_stderr": 0.0048018528813297484, + "acc_norm": 0.46415056761601275, + "acc_norm_stderr": 0.004976939333240076 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.0178389560091368, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.0178389560091368 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197598, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 
0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.03128217706368461, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.03128217706368461 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776296, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + 
"acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679214, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679214 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3950617283950617, + "acc_stderr": 0.027201117666925657, + "acc_norm": 0.3950617283950617, + "acc_norm_stderr": 0.027201117666925657 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557673, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579858, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579858 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42201834862385323, + "acc_stderr": 0.02117499140776317, + "acc_norm": 0.42201834862385323, + "acc_norm_stderr": 0.02117499140776317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423545, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423545 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092487, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092487 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150381, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150381 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02988691054762697, + 
"acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02988691054762697 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398866, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398866 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030802, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030802 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45147679324894513, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.45147679324894513, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29335071707953064, + "acc_stderr": 0.011628520449582075, + "acc_norm": 0.29335071707953064, + "acc_norm_stderr": 0.011628520449582075 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833343, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833343 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398395, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398395 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.408852370253922, + "mc2_stderr": 0.015158662984848508 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43211334120425027, + "acc_stderr": 0.017031170198851746, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.017189767032130817 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/llama-2-13b-instruct-Y24-v1", + "model_sha": "edce003ff6a63c6a225564d7763a89ade6eaa15d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/llama-2-13b-instruct-Y24-v2/result_2023-11-29 06:29:40.json b/Ja3ck/llama-2-13b-instruct-Y24-v2/result_2023-11-29 06:29:40.json new file mode 100644 index 0000000000000000000000000000000000000000..216d3fce1174d94b55c61a6d80106afdda1c40cb --- /dev/null +++ b/Ja3ck/llama-2-13b-instruct-Y24-v2/result_2023-11-29 06:29:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32337883959044367, + "acc_stderr": 0.01366942163001212, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 0.01415063143511173 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3632742481577375, + "acc_stderr": 0.00479959984039737, + "acc_norm": 0.4640509858593906, + "acc_norm_stderr": 0.0049768677965835615 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468547, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353228, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353228 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 
0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.028099240775809567, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.028099240775809567 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36134453781512604, + "acc_stderr": 0.031204691225150013, + "acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.031204691225150013 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.382051282051282, + "acc_stderr": 0.024635549163908223, + "acc_norm": 0.382051282051282, + "acc_norm_stderr": 0.024635549163908223 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.0302422338008545, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.0302422338008545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 
0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02351729433596329, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02351729433596329 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.026589231142174267, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.026589231142174267 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456024, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.027339546640662734, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.027339546640662734 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579858, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579858 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43853211009174314, + "acc_stderr": 0.021274713073954562, + "acc_norm": 0.43853211009174314, + "acc_norm_stderr": 0.021274713073954562 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.018975427920507215, + "acc_norm": 0.32679738562091504, + 
"acc_norm_stderr": 0.018975427920507215 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347019, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347019 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372937, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.02850145286039659, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.02850145286039659 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3346938775510204, + "acc_stderr": 0.030209235226242307, + "acc_norm": 0.3346938775510204, + "acc_norm_stderr": 0.030209235226242307 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4050632911392405, + "acc_stderr": 0.03195514741370673, + "acc_norm": 0.4050632911392405, + "acc_norm_stderr": 0.03195514741370673 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2926988265971317, + "acc_stderr": 0.011620949195849523, + "acc_norm": 0.2926988265971317, + "acc_norm_stderr": 0.011620949195849523 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087305, + "mc2": 0.43028202661607795, + "mc2_stderr": 0.015397184787151977 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4639905548996458, + "acc_stderr": 0.017145715365486664, + "acc_norm": 0.5501770956316411, + "acc_norm_stderr": 0.01710357334382571 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/llama-2-13b-instruct-Y24-v2", + "model_sha": "8cb60efc316f104368a65f4b1b68fc52af84f546", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jaewoo1/Foundation_Platypus_data/result_2023-10-18 09:16:14.json b/Jaewoo1/Foundation_Platypus_data/result_2023-10-18 09:16:14.json new file mode 100644 index 0000000000000000000000000000000000000000..6e3729e8a31e7e93ea1f229eae47620244c415e0 --- /dev/null +++ b/Jaewoo1/Foundation_Platypus_data/result_2023-10-18 09:16:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30204778156996587, + "acc_stderr": 0.013417519144716417, + "acc_norm": 0.3174061433447099, + "acc_norm_stderr": 0.01360223908803817 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3450507866958773, + "acc_stderr": 0.004744132825391515, + "acc_norm": 0.41196972714598684, + "acc_norm_stderr": 0.00491183773058221 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041692, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041692 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39719029374201786, + "acc_stderr": 0.017497905037159377, + "acc_norm": 0.39719029374201786, + "acc_norm_stderr": 0.017497905037159377 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 
0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534422, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534422 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511114, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511114 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.37942122186495175, + "acc_stderr": 0.027559949802347817, + "acc_norm": 0.37942122186495175, + "acc_norm_stderr": 0.027559949802347817 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419034, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419034 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3384615384615385, + "acc_stderr": 0.023991500500313036, + "acc_norm": 0.3384615384615385, + "acc_norm_stderr": 0.023991500500313036 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3419354838709677, + "acc_stderr": 0.026985289576552732, + "acc_norm": 0.3419354838709677, + "acc_norm_stderr": 0.026985289576552732 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5341880341880342, + "acc_stderr": 0.03267942734081228, + "acc_norm": 0.5341880341880342, + "acc_norm_stderr": 0.03267942734081228 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33962264150943394, + "acc_stderr": 0.029146904747798342, + "acc_norm": 
0.33962264150943394, + "acc_norm_stderr": 0.029146904747798342 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652457, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652457 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4129353233830846, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.4129353233830846, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.037161774375660164, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.037161774375660164 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.026296227915613663, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.026296227915613663 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3487654320987654, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.3487654320987654, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.037124548537213684, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.037124548537213684 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3522935779816514, + "acc_stderr": 0.020480568843999004, + "acc_norm": 0.3522935779816514, + "acc_norm_stderr": 0.020480568843999004 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.045604560863872365, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.045604560863872365 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.01890101532209309, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.01890101532209309 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101373, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101373 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952688, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952688 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.01424263007057489, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.01424263007057489 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.26838235294117646, + "acc_stderr": 0.026917481224377246, + "acc_norm": 0.26838235294117646, + "acc_norm_stderr": 0.026917481224377246 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484375, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484375 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5274261603375527, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.5274261603375527, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.303129074315515, + "acc_stderr": 0.0117386699512543, + "acc_norm": 0.303129074315515, + "acc_norm_stderr": 0.0117386699512543 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006509, + "mc2": 0.4249328187172098, + "mc2_stderr": 0.016337088601279814 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2550177095631641, + "acc_stderr": 0.014985559533428554, + "acc_norm": 0.30932703659976385, + "acc_norm_stderr": 0.015891320505520893 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaewoo1/Foundation_Platypus_data", + "model_sha": "63fbecee8df6cc694880299e37b7cd8f8140942e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jaewoo1/KoT-Platypus2_foundation/result_2023-10-16 07:12:51.json b/Jaewoo1/KoT-Platypus2_foundation/result_2023-10-16 07:12:51.json new file mode 100644 index 0000000000000000000000000000000000000000..7de61f7a15a36f9e0fa9621a6b744ee5b65345a1 --- /dev/null +++ b/Jaewoo1/KoT-Platypus2_foundation/result_2023-10-16 07:12:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27047781569965873, + "acc_stderr": 0.012980954547659556, + "acc_norm": 0.3319112627986348, + "acc_norm_stderr": 0.013760988200880541 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3505277833100976, + "acc_stderr": 0.004761601303258889, + "acc_norm": 0.44722166899024096, + "acc_norm_stderr": 0.0049619049491713965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.03599335771456027, + "acc_norm": 
0.32748538011695905, + "acc_norm_stderr": 0.03599335771456027 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161549, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161549 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.351213282247765, + "acc_stderr": 0.01706998205149943, + "acc_norm": 0.351213282247765, + "acc_norm_stderr": 0.01706998205149943 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.031489558297455304, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.031489558297455304 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31511254019292606, + "acc_stderr": 0.026385273703464496, + "acc_norm": 0.31511254019292606, + "acc_norm_stderr": 0.026385273703464496 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.040103589424622034, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.040103589424622034 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03960933549451207, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03960933549451207 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.028942004040998167, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.028942004040998167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.02255655101013235, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.02255655101013235 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733552, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733552 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042767, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042767 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4658119658119658, + "acc_stderr": 0.03267942734081228, + "acc_norm": 0.4658119658119658, + "acc_norm_stderr": 0.03267942734081228 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.028985455652334395, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.028985455652334395 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.025644108639267645, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.025644108639267645 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03333333333333336, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03333333333333336 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594295, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918428, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918428 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624576, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624576 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.02440517393578323, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.02440517393578323 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.31790123456790126, + "acc_stderr": 0.025910063528240865, + "acc_norm": 0.31790123456790126, + "acc_norm_stderr": 0.025910063528240865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3155963302752294, + "acc_stderr": 0.019926117513869666, + "acc_norm": 0.3155963302752294, + "acc_norm_stderr": 0.019926117513869666 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.026256053835718964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.026256053835718964 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.035834961763610625, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.035834961763610625 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.01766784161237899, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.01766784161237899 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537773, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537773 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.044939490686135404, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.044939490686135404 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.028765111718046972, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.028765111718046972 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22793296089385476, + "acc_stderr": 0.014030149950805095, + "acc_norm": 0.22793296089385476, + "acc_norm_stderr": 0.014030149950805095 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682486, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19183673469387755, + "acc_stderr": 0.025206963154225395, + "acc_norm": 0.19183673469387755, + "acc_norm_stderr": 0.025206963154225395 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.0299366963871386, + "acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.0299366963871386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2666232073011734, + "acc_stderr": 0.01129383603161213, + "acc_norm": 0.2666232073011734, + "acc_norm_stderr": 0.01129383603161213 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03283472056108567, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03283472056108567 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4297360873033464, + "mc2_stderr": 0.016304548005749996 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.23258559622195984, + "acc_stderr": 0.014525169182416493, + "acc_norm": 0.27508854781582054, + "acc_norm_stderr": 0.015353010757952649 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaewoo1/KoT-Platypus2_foundation", + "model_sha": "7e97a65b825f9aa4691fe2bebf14696d80ba831d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA/result_2023-10-04 03:17:08.json b/Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA/result_2023-10-04 03:17:08.json new file mode 100644 index 0000000000000000000000000000000000000000..113f7460903568d5c1a03b126f52055cdbab6d56 --- /dev/null +++ 
b/Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA/result_2023-10-04 03:17:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.257679180887372, + "acc_stderr": 0.0127807705627684, + "acc_norm": 0.3003412969283277, + "acc_norm_stderr": 0.01339590930995701 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3263294164509062, + "acc_stderr": 0.004679111783653908, + "acc_norm": 0.385381398127863, + "acc_norm_stderr": 0.00485690647371939 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3959131545338442, + "acc_stderr": 0.01748824700697927, + "acc_norm": 0.3959131545338442, + "acc_norm_stderr": 0.01748824700697927 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.030251237579213167, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.030251237579213167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3729903536977492, + "acc_stderr": 0.0274666102131401, + "acc_norm": 0.3729903536977492, + "acc_norm_stderr": 0.0274666102131401 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168264, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3319327731092437, + "acc_stderr": 0.030588697013783667, + "acc_norm": 0.3319327731092437, + "acc_norm_stderr": 0.030588697013783667 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3282051282051282, + "acc_stderr": 0.023807633198657262, + "acc_norm": 0.3282051282051282, + "acc_norm_stderr": 0.023807633198657262 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + 
"acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3387096774193548, + "acc_stderr": 0.02692344605930284, + "acc_norm": 0.3387096774193548, + "acc_norm_stderr": 0.02692344605930284 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5470085470085471, + "acc_stderr": 0.0326109987309862, + "acc_norm": 0.5470085470085471, + "acc_norm_stderr": 0.0326109987309862 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199593, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199593 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.44776119402985076, + "acc_stderr": 0.03516184772952166, + "acc_norm": 0.44776119402985076, + "acc_norm_stderr": 0.03516184772952166 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267437, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267437 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.025992472029306386, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.025992472029306386 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3395061728395062, + "acc_stderr": 0.026348564412011624, + "acc_norm": 0.3395061728395062, + "acc_norm_stderr": 0.026348564412011624 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { 
+ "acc": 0.37823834196891193, + "acc_stderr": 0.034998072761933396, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.034998072761933396 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3339449541284404, + "acc_stderr": 0.020220554196736403, + "acc_norm": 0.3339449541284404, + "acc_norm_stderr": 0.020220554196736403 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.02742047766262925, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.02742047766262925 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351587, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351587 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29248366013071897, + "acc_stderr": 0.018403415710109797, + "acc_norm": 0.29248366013071897, + "acc_norm_stderr": 0.018403415710109797 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307857, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307857 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289784, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289784 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.26838235294117646, + "acc_stderr": 0.026917481224377243, + "acc_norm": 0.26838235294117646, + "acc_norm_stderr": 0.026917481224377243 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.030472526026726492, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.030472526026726492 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3670886075949367, + "acc_stderr": 0.03137624072561618, + "acc_norm": 0.3670886075949367, + "acc_norm_stderr": 0.03137624072561618 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28552803129074317, + "acc_stderr": 0.011535751586665673, + "acc_norm": 0.28552803129074317, + "acc_norm_stderr": 0.011535751586665673 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476196, + "mc2": 0.41968593595047643, + "mc2_stderr": 0.016254999867947123 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2384887839433294, + "acc_stderr": 0.014651663985271578, + "acc_norm": 0.29279811097992914, + "acc_norm_stderr": 0.015644823205401334 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA", + "model_sha": 
"cbb72323bf2db6eb9ea591a4a882d02964d53eed", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus/result_2023-10-04 09:05:17.json b/Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus/result_2023-10-04 09:05:17.json new file mode 100644 index 0000000000000000000000000000000000000000..507d14683459004ae3b0f17d264e96971a437be9 --- /dev/null +++ b/Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus/result_2023-10-04 09:05:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29692832764505117, + "acc_stderr": 0.013352025976725222, + "acc_norm": 0.34812286689419797, + "acc_norm_stderr": 0.013921008595179342 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35311690898227444, + "acc_stderr": 0.004769618829196517, + "acc_norm": 0.42939653455486954, + "acc_norm_stderr": 0.0049397843114489855 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781169, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781169 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41507024265644954, + "acc_stderr": 0.017620137003655265, + "acc_norm": 0.41507024265644954, + "acc_norm_stderr": 0.017620137003655265 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.43434343434343436, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.43434343434343436, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.024121125416941173, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.024121125416941173 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3774193548387097, + "acc_stderr": 0.02757596072327824, + "acc_norm": 0.3774193548387097, + "acc_norm_stderr": 0.02757596072327824 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5726495726495726, + "acc_stderr": 0.03240847393516326, + "acc_norm": 0.5726495726495726, + "acc_norm_stderr": 0.03240847393516326 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37358490566037733, + "acc_stderr": 0.02977308271331988, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.02977308271331988 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.02763490726417854, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.02763490726417854 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4925373134328358, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.4925373134328358, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.026564178111422622, + "acc_norm": 
0.4190751445086705, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548914, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548914 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269955, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269955 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3834862385321101, + "acc_stderr": 0.020847156641915984, + "acc_norm": 0.3834862385321101, + "acc_norm_stderr": 0.020847156641915984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.027780141207023337, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.027780141207023337 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775089, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775089 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.038035102483515854 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.01899970738316267, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.01899970738316267 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03246887243637648, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03246887243637648 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260657, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260657 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335314, + "acc_norm": 0.33455882352941174, + 
"acc_norm_stderr": 0.028661996202335314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4219409282700422, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.4219409282700422, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271817, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271817 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353383, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353383 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.01607750926613303, + "mc2": 0.4750714543386988, + "mc2_stderr": 0.016159472828434183 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2727272727272727, + "acc_stderr": 0.015311853110300352, + "acc_norm": 0.34946871310507677, + "acc_norm_stderr": 0.01639279708576985 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, 
+ "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus", + "model_sha": "1c97acb58f2a740d7994d1ea7b0c02c234bbde3a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jaewoo1/Platypus7B_Follow_FT/result_2023-10-21 14:41:08.json b/Jaewoo1/Platypus7B_Follow_FT/result_2023-10-21 14:41:08.json new file mode 100644 index 0000000000000000000000000000000000000000..2a38e96f33d447a72d36aae0212ec045cf617ca5 --- /dev/null +++ b/Jaewoo1/Platypus7B_Follow_FT/result_2023-10-21 14:41:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.197098976109215, + "acc_stderr": 0.011625047669880612, + "acc_norm": 0.26535836177474403, + "acc_norm_stderr": 0.012902554762313964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.29904401513642703, + "acc_stderr": 0.0045690346133326004, + "acc_norm": 0.36675960963951404, + "acc_norm_stderr": 0.0048093520750089385 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.0398913985953177, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.0398913985953177 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2796934865900383, + "acc_stderr": 0.016050792148036536, + "acc_norm": 0.2796934865900383, + "acc_norm_stderr": 0.016050792148036536 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.027678452578212387, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.027678452578212387 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21084337349397592, + "acc_stderr": 0.031755547866299194, + "acc_norm": 0.21084337349397592, + "acc_norm_stderr": 0.031755547866299194 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.02540383297817961, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.02540383297817961 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.030216831011508762, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.030216831011508762 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728742, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728742 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + 
"acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270285, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270285 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863786, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2641025641025641, + "acc_stderr": 0.022352193737453285, + "acc_norm": 0.2641025641025641, + "acc_norm_stderr": 0.022352193737453285 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.029678333141444444, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.029678333141444444 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517414, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517414 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.030882736974138663, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.030882736974138663 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.0271342916287417, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.0271342916287417 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.038950910157241364, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.038950910157241364 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.29850746268656714, + "acc_stderr": 0.032357437893550424, + "acc_norm": 0.29850746268656714, + "acc_norm_stderr": 0.032357437893550424 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 
0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071128, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.024569223600460845, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.024569223600460845 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28807339449541286, + "acc_stderr": 0.019416445892636015, + "acc_norm": 0.28807339449541286, + "acc_norm_stderr": 0.019416445892636015 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.025646863097137908, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.025646863097137908 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.03391160934343602, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.03391160934343602 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.01784808957491323, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.01784808957491323 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012386, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012386 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225605, + "acc_norm": 
0.24581005586592178, + "acc_norm_stderr": 0.014400296429225605 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411962, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411962 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174913, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.03058732629470236, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.03058732629470236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2522816166883963, + "acc_stderr": 0.011092789056875248, + "acc_norm": 0.2522816166883963, + "acc_norm_stderr": 0.011092789056875248 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087288, + "mc2": 0.4755864114164748, + "mc2_stderr": 0.016657423214439165 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.1912632821723731, + "acc_stderr": 0.013521790445859333, + "acc_norm": 0.3659976387249115, + "acc_norm_stderr": 0.016561489664895686 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaewoo1/Platypus7B_Follow_FT", + "model_sha": "ac5c77ab817d2d9b0a4f3fc7c609dce3770428d8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jaewoo1/Platypus7B_Follow_LoRA/result_2023-10-22 15:04:14.json b/Jaewoo1/Platypus7B_Follow_LoRA/result_2023-10-22 15:04:14.json new file mode 100644 index 0000000000000000000000000000000000000000..84ed866e2ad5042a51e9082891296c28c7bb199c --- /dev/null +++ b/Jaewoo1/Platypus7B_Follow_LoRA/result_2023-10-22 15:04:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27986348122866894, + "acc_stderr": 0.013119040897725923, + "acc_norm": 0.3506825938566553, + "acc_norm_stderr": 0.013944635930726089 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3648675562636925, + "acc_stderr": 0.004804091708812553, + "acc_norm": 0.4856602270464051, + "acc_norm_stderr": 0.004987728900897601 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.017268607560005776, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.017268607560005776 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628817, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628817 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3987138263665595, + "acc_stderr": 0.0278093225857745, + "acc_norm": 0.3987138263665595, + "acc_norm_stderr": 
0.0278093225857745 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416828, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416828 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016339, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.03464881675016339 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03156663099215416, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03156663099215416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28717948717948716, + "acc_stderr": 0.022939925418530613, + "acc_norm": 0.28717948717948716, + "acc_norm_stderr": 0.022939925418530613 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733555, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733555 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.027666182075539652, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.027666182075539652 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.49572649572649574, + "acc_stderr": 0.032754892643821316, + "acc_norm": 0.49572649572649574, + "acc_norm_stderr": 0.032754892643821316 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.02898545565233439, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.02898545565233439 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.02549753263960955, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.02549753263960955 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4527363184079602, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.4527363184079602, + "acc_norm_stderr": 0.035197027175769155 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415412, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415412 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.025992472029306386, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.025992472029306386 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935575, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935575 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38580246913580246, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.38580246913580246, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38860103626943004, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.38860103626943004, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4018348623853211, + "acc_stderr": 0.021020106172997013, + "acc_norm": 0.4018348623853211, + "acc_norm_stderr": 0.021020106172997013 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423556, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423556 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.01887568293806944, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.01887568293806944 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.02997280717046463, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.02997280717046463 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + "acc_stderr": 0.030299506562154185, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 0.030299506562154185 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4810126582278481, + "acc_stderr": 0.03252375148090448, + "acc_norm": 0.4810126582278481, + "acc_norm_stderr": 0.03252375148090448 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.011849234291459324, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.011849234291459324 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247272, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247272 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752332, + "mc2": 0.3857319099407924, + "mc2_stderr": 0.015181937276962347 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2066115702479339, + "acc_stderr": 0.013919866463909341, + "acc_norm": 0.2987012987012987, + "acc_norm_stderr": 0.015735657391438278 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaewoo1/Platypus7B_Follow_LoRA", + "model_sha": "b963d09e5db0e791858e56e3fafac7e066328014", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jenti-Kaeri/ko-llama2-13b-OrcaPlatypus/result_2023-11-06 07:34:14.json b/Jenti-Kaeri/ko-llama2-13b-OrcaPlatypus/result_2023-11-06 07:34:14.json new file mode 100644 index 0000000000000000000000000000000000000000..7a6cc48abb238803e71b1c452511f4e9446d4eea --- /dev/null +++ b/Jenti-Kaeri/ko-llama2-13b-OrcaPlatypus/result_2023-11-06 07:34:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042194, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40659231228838877, + "acc_stderr": 0.00490193651154613, + "acc_norm": 0.5416251742680741, + "acc_norm_stderr": 0.004972460206842306 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5070242656449553, + "acc_stderr": 0.017878199003432214, + "acc_norm": 0.5070242656449553, + "acc_norm_stderr": 0.017878199003432214 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + 
}, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197598, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097413, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097413 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641086, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641086 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.0305032920133426, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.0305032920133426 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622841, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622841 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972592, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972592 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.026636539741116072, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.026636539741116072 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.02775653525734767, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.02775653525734767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272436, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883034, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883034 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 
0.040516463428741406, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.040516463428741406 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31209150326797386, + "acc_stderr": 0.018745011201277657, + "acc_norm": 0.31209150326797386, + "acc_norm_stderr": 0.018745011201277657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169945, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169945 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.025336848563332338, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.025336848563332338 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29726205997392435, + "acc_stderr": 0.011673346173086048, + "acc_norm": 0.29726205997392435, + "acc_norm_stderr": 0.011673346173086048 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.034602283272391704, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.034602283272391704 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834559, + "mc2": 0.4313245637601363, + "mc2_stderr": 0.01494158153176466 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44510035419126326, + "acc_stderr": 0.017086417431005474, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.017145715365486664 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jenti-Kaeri/ko-llama2-13b-OrcaPlatypus", + "model_sha": "80952bf913ab217ee77ee0328f3c9e68cc1abf22", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jenti-Kaeri/ko-llama2-13b-platypus/result_2023-11-06 11:48:16.json b/Jenti-Kaeri/ko-llama2-13b-platypus/result_2023-11-06 11:48:16.json new file mode 100644 index 0000000000000000000000000000000000000000..f7666ce48b813438ef3bba1ed27f43129ebb30f9 --- /dev/null +++ b/Jenti-Kaeri/ko-llama2-13b-platypus/result_2023-11-06 11:48:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3199658703071672, + "acc_stderr": 0.013631345807016195, + "acc_norm": 0.3779863481228669, + "acc_norm_stderr": 0.014169664520303101 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3586934873531169, + "acc_stderr": 0.004786368011500456, + "acc_norm": 0.4553873730332603, + "acc_norm_stderr": 0.0049698795328430865 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365778, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365778 + }, + "harness|ko_mmlu_miscellaneous|5": { + 
"acc": 0.44189016602809705, + "acc_stderr": 0.01775880053421442, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.01775880053421442 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.031602951437766785, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.031602951437766785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4595959595959596, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.4595959595959596, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.02478431694215637, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.02478431694215637 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4064516129032258, + "acc_stderr": 0.027941727346256315, + "acc_norm": 0.4064516129032258, + "acc_norm_stderr": 0.027941727346256315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5470085470085471, + "acc_stderr": 0.03261099873098619, + 
"acc_norm": 0.5470085470085471, + "acc_norm_stderr": 0.03261099873098619 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.02357760479165581, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.02357760479165581 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206188, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206188 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027125115513166865, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027125115513166865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42752293577981654, + "acc_stderr": 0.02121091020430043, + "acc_norm": 0.42752293577981654, + "acc_norm_stderr": 0.02121091020430043 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 0.4215686274509804, + 
"acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114023, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114023 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347019, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347019 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569746, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842538, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842538 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803538, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803538 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373616, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373616 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.0372820699868265, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.0372820699868265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.46212361951327446, + "mc2_stderr": 0.015249362527618285 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3955135773317591, + "acc_stderr": 0.016810815902206046, + "acc_norm": 0.4757969303423849, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jenti-Kaeri/ko-llama2-13b-platypus", + "model_sha": "f20decdd9d1525560ce299352c7ee6421c5ec81d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Junmai/KIT-5.8b/result_2023-11-01 12:58:40.json b/Junmai/KIT-5.8b/result_2023-11-01 12:58:40.json new file mode 100644 index 0000000000000000000000000000000000000000..149e8a751e209a16597d56ee46c43985c606c5c5 --- /dev/null +++ b/Junmai/KIT-5.8b/result_2023-11-01 12:58:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2593856655290102, + "acc_stderr": 0.01280827357392708, + "acc_norm": 0.2841296928327645, + "acc_norm_stderr": 0.013179442447653887 + }, + "harness|ko_hellaswag|10": { + "acc": 0.359788886675961, + "acc_stderr": 0.004789575163418654, + "acc_norm": 0.4523003385779725, + "acc_norm_stderr": 
0.004967023435680013 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.030267457554898465, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.030267457554898465 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.04541609446503948, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.04541609446503948 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20051085568326948, + "acc_stderr": 0.014317653708594209, + "acc_norm": 0.20051085568326948, + "acc_norm_stderr": 0.014317653708594209 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.037857144650666544, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.037857144650666544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838742, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838742 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.23493975903614459, + "acc_stderr": 0.03300533186128922, + "acc_norm": 0.23493975903614459, + "acc_norm_stderr": 0.03300533186128922 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.1031390134529148, + "acc_stderr": 0.020412564289839272, + "acc_norm": 0.1031390134529148, + "acc_norm_stderr": 0.020412564289839272 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": 
{ + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.02863723563980091, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.02863723563980091 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2328042328042328, + "acc_stderr": 0.02176596167215453, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.02176596167215453 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757177, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.03351953879521272, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.03351953879521272 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + 
"acc": 0.3486238532110092, + "acc_stderr": 0.020431254090714328, + "acc_norm": 0.3486238532110092, + "acc_norm_stderr": 0.020431254090714328 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.02582916327275748, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.02582916327275748 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.19834710743801653, + "acc_stderr": 0.03640118271990947, + "acc_norm": 0.19834710743801653, + "acc_norm_stderr": 0.03640118271990947 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953776, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953776 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.029923100563683903, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.029923100563683903 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2109704641350211, + "acc_stderr": 0.02655837250266192, + "acc_norm": 0.2109704641350211, + "acc_norm_stderr": 0.02655837250266192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24315514993481094, + "acc_stderr": 0.010956556654417358, + "acc_norm": 0.24315514993481094, + "acc_norm_stderr": 0.010956556654417358 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707693, + "mc2": 0.40433266036479987, + "mc2_stderr": 0.014934839595516874 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29279811097992914, + "acc_stderr": 0.015644823205401334, + "acc_norm": 0.3907910271546635, + "acc_norm_stderr": 0.01677529846510826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Junmai/KIT-5.8b", + "model_sha": "92023e894134ae843de46164fb793a8641447785", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Junmai/KIT-7B-v1/result_2023-11-07 06:23:03.json b/Junmai/KIT-7B-v1/result_2023-11-07 06:23:03.json new file mode 100644 index 
0000000000000000000000000000000000000000..5a17deb2872b6721a6022708ac936ff2448f9e19 --- /dev/null +++ b/Junmai/KIT-7B-v1/result_2023-11-07 06:23:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19112627986348124, + "acc_stderr": 0.0114900552927786, + "acc_norm": 0.23890784982935154, + "acc_norm_stderr": 0.012461071376316621 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25253933479386576, + "acc_stderr": 0.0043358096144803055, + "acc_norm": 0.2394941246763593, + "acc_norm_stderr": 0.004259025448541511 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.035650796707083106, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.035650796707083106 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2541507024265645, + "acc_stderr": 0.01556925469204578, + "acc_norm": 0.2541507024265645, + "acc_norm_stderr": 0.01556925469204578 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386684, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.03329394119073528, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.03329394119073528 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.18971061093247588, + "acc_stderr": 0.02226819625878322, + "acc_norm": 0.18971061093247588, + "acc_norm_stderr": 0.02226819625878322 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.32286995515695066, + "acc_stderr": 0.03138147637575498, + "acc_norm": 0.32286995515695066, + "acc_norm_stderr": 0.03138147637575498 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300992, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300992 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931666, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931666 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21182266009852216, + "acc_stderr": 0.028748983689941048, + "acc_norm": 0.21182266009852216, + "acc_norm_stderr": 0.028748983689941048 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.02606936229533513, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02606936229533513 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.20754716981132076, + "acc_stderr": 0.024959918028911274, + "acc_norm": 0.20754716981132076, + "acc_norm_stderr": 0.024959918028911274 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21957671957671956, + "acc_stderr": 0.021320018599770372, + "acc_norm": 0.21957671957671956, + "acc_norm_stderr": 0.021320018599770372 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587403, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.034089978868575295, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.034089978868575295 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.022409674547304175, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.022409674547304175 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 
0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26972477064220185, + "acc_stderr": 0.01902848671111545, + "acc_norm": 0.26972477064220185, + "acc_norm_stderr": 0.01902848671111545 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.20261437908496732, + "acc_stderr": 0.023015446877985672, + "acc_norm": 0.20261437908496732, + "acc_norm_stderr": 0.023015446877985672 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137282, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137282 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.017479487001364764, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.017479487001364764 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872405, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578728, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578728 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.03011642629654057, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.03011642629654057 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594703, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594703 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2503259452411995, + 
"acc_stderr": 0.011064151027165433, + "acc_norm": 0.2503259452411995, + "acc_norm_stderr": 0.011064151027165433 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251735, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251735 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2, + "acc_stderr": 0.03123475237772118, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03123475237772118 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.01572313952460875, + "mc2": 0.4606951019662925, + "mc2_stderr": 0.015545587074280528 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.06729634002361275, + "acc_stderr": 0.00861355401775773, + "acc_norm": 0.27390791027154665, + "acc_norm_stderr": 0.01533249947479102 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "Junmai/KIT-7B-v1", + "model_sha": "fc0e83e20d93b8bfb763205022dee78f36e01e60", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Junmai/KIT-7B-v2/result_2023-11-07 06:26:48.json b/Junmai/KIT-7B-v2/result_2023-11-07 06:26:48.json new file mode 100644 index 0000000000000000000000000000000000000000..10802510a8094f8a333be058bbea26a5725f48a2 --- /dev/null +++ b/Junmai/KIT-7B-v2/result_2023-11-07 06:26:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.17406143344709898, + "acc_stderr": 0.011080177129482213, + "acc_norm": 0.23464163822525597, + "acc_norm_stderr": 0.01238387356076867 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2528380800637323, + "acc_stderr": 0.004337506344899915, + "acc_norm": 0.23889663413662618, + "acc_norm_stderr": 0.004255380050015134 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777576, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777576 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.03455473702325436, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03455473702325436 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.20257234726688103, + "acc_stderr": 0.022827317491059682, + "acc_norm": 0.20257234726688103, + "acc_norm_stderr": 0.022827317491059682 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134988, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134988 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082395, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082395 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.028869778460267045, + "acc_norm": 0.20707070707070707, + "acc_norm_stderr": 0.028869778460267045 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764805, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764805 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.02544786382510862, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.02544786382510862 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.032578473844367746, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.032578473844367746 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 0.029475250236017197, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.029475250236017197 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198816, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198816 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.22699386503067484, + "acc_stderr": 0.0329109957861577, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.0329109957861577 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.02357688174400571, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400571 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.017923087667803053, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.017923087667803053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790607, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790607 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023805186524888142, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023805186524888142 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926605, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.016992723465466226, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.016992723465466226 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.0443280405529152, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.0443280405529152 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152593, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152593 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3183673469387755, + 
"acc_stderr": 0.02982253379398209, + "acc_norm": 0.3183673469387755, + "acc_norm_stderr": 0.02982253379398209 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460302, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460302 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251735, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251735 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.0315841532404771, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.0315841532404771 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087304, + "mc2": 0.463216438419055, + "mc2_stderr": 0.015329707584198729 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07319952774498228, + "acc_stderr": 0.008954927647725423, + "acc_norm": 0.28689492325855964, + "acc_norm_stderr": 0.015550809966781778 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Junmai/KIT-7b-v2", + "model_sha": "20725892ef31e719453654c06747f48934ccdd57", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Junmai/KIT-7B-v2/result_2023-11-07 06:54:57.json b/Junmai/KIT-7B-v2/result_2023-11-07 06:54:57.json new file mode 100644 index 0000000000000000000000000000000000000000..26a4104ace84311415d0137804bb730001971313 --- /dev/null +++ b/Junmai/KIT-7B-v2/result_2023-11-07 06:54:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.17406143344709898, + "acc_stderr": 0.011080177129482213, + "acc_norm": 0.23464163822525597, + "acc_norm_stderr": 0.01238387356076867 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2528380800637323, + "acc_stderr": 0.004337506344899915, + "acc_norm": 0.23889663413662618, + "acc_norm_stderr": 0.004255380050015134 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777576, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777576 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.03455473702325436, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03455473702325436 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.20257234726688103, + "acc_stderr": 0.022827317491059682, + "acc_norm": 0.20257234726688103, + "acc_norm_stderr": 0.022827317491059682 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134988, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134988 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082395, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082395 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.028869778460267045, + "acc_norm": 0.20707070707070707, + 
"acc_norm_stderr": 0.028869778460267045 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764805, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764805 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.02544786382510862, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.02544786382510862 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.032578473844367746, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.032578473844367746 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 0.029475250236017197, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.029475250236017197 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198816, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198816 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + 
"acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.0329109957861577, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.0329109957861577 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.02357688174400571, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400571 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.017923087667803053, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.017923087667803053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790607, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790607 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023805186524888142, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023805186524888142 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926605, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.016992723465466226, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.016992723465466226 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.0443280405529152, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.0443280405529152 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152593, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152593 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + 
}, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3183673469387755, + "acc_stderr": 0.02982253379398209, + "acc_norm": 0.3183673469387755, + "acc_norm_stderr": 0.02982253379398209 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460302, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460302 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251735, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251735 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.0315841532404771, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.0315841532404771 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087304, + "mc2": 0.4632137219137931, + "mc2_stderr": 0.015329765440912904 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07319952774498228, + "acc_stderr": 0.008954927647725423, + "acc_norm": 0.28689492325855964, + "acc_norm_stderr": 0.015550809966781778 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Junmai/KIT-7B-v2", + "model_sha": "20725892ef31e719453654c06747f48934ccdd57", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Junmai/KIT-7B-v3/result_2023-11-09 02:14:48.json b/Junmai/KIT-7B-v3/result_2023-11-09 02:14:48.json new file mode 100644 index 0000000000000000000000000000000000000000..700975749a3237fa6e87084a494bbae04013a689 --- /dev/null +++ b/Junmai/KIT-7B-v3/result_2023-11-09 02:14:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21416382252559726, + "acc_stderr": 0.011988383205966494, + "acc_norm": 0.26535836177474403, + "acc_norm_stderr": 0.012902554762313964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.253734315873332, + "acc_stderr": 0.004342580277662732, + "acc_norm": 0.2401911969727146, + "acc_norm_stderr": 0.004263263933601555 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.046561471100123514, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.046561471100123514 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.227330779054917, + "acc_stderr": 0.014987270640946015, + "acc_norm": 0.227330779054917, + "acc_norm_stderr": 0.014987270640946015 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174023, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174023 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.1829787234042553, + "acc_stderr": 0.02527604100044997, + "acc_norm": 0.1829787234042553, + "acc_norm_stderr": 0.02527604100044997 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.15060240963855423, + "acc_stderr": 0.02784386378726433, + "acc_norm": 0.15060240963855423, + "acc_norm_stderr": 0.02784386378726433 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2508038585209003, + "acc_stderr": 0.024619771956697165, + "acc_norm": 0.2508038585209003, + "acc_norm_stderr": 0.024619771956697165 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.16591928251121077, + "acc_stderr": 0.02496755319654716, + "acc_norm": 0.16591928251121077, + "acc_norm_stderr": 0.02496755319654716 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.2595419847328244, + "acc_stderr": 0.0384487613978527, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.0384487613978527 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036843, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036843 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.03383201223244442, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.03383201223244442 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924812, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924812 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2815126050420168, + "acc_stderr": 0.029213549414372177, + "acc_norm": 0.2815126050420168, + "acc_norm_stderr": 0.029213549414372177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.0231193627582323, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.0231193627582323 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.031618563353586086, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.031618563353586086 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.025988500792411898, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.025988500792411898 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20512820512820512, + "acc_stderr": 0.026453508054040335, + "acc_norm": 0.20512820512820512, + "acc_norm_stderr": 0.026453508054040335 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724064, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724064 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.19900497512437812, + "acc_stderr": 0.02823136509275841, + "acc_norm": 0.19900497512437812, + "acc_norm_stderr": 0.02823136509275841 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587404, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587404 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02378858355165854, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02378858355165854 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30458715596330277, + "acc_stderr": 0.019732299420354038, + "acc_norm": 0.30458715596330277, + "acc_norm_stderr": 0.019732299420354038 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.19008264462809918, + "acc_stderr": 0.035817969517092825, + "acc_norm": 0.19008264462809918, + "acc_norm_stderr": 0.035817969517092825 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.038035102483515854 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.01747948700136476, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.01747948700136476 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340461004, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340461004 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.15178571428571427, + "acc_stderr": 0.03405702838185693, + "acc_norm": 0.15178571428571427, + "acc_norm_stderr": 0.03405702838185693 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 
0.03225941352631296, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631296 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2681564245810056, + "acc_stderr": 0.014816119635317003, + "acc_norm": 0.2681564245810056, + "acc_norm_stderr": 0.014816119635317003 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.26838235294117646, + "acc_stderr": 0.02691748122437722, + "acc_norm": 0.26838235294117646, + "acc_norm_stderr": 0.02691748122437722 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2897959183673469, + "acc_stderr": 0.029043088683304328, + "acc_norm": 0.2897959183673469, + "acc_norm_stderr": 0.029043088683304328 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.035465630196243346, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.035465630196243346 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.01489627744104183, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.0885478158205431, + "acc_stderr": 0.00976721370275642, + "acc_norm": 0.19952774498229045, + "acc_norm_stderr": 0.013740090947621325 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 
1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Junmai/KIT-7B-v3", + "model_sha": "17167805a31f62fa72d3a5c4dc2abf7201a3395d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/KRAFTON/KORani-v1-13B/result_2023-10-17 13:34:31.json b/KRAFTON/KORani-v1-13B/result_2023-10-17 13:34:31.json new file mode 100644 index 0000000000000000000000000000000000000000..229475938b92c3acce42d7b313d93d56abe180e4 --- /dev/null +++ b/KRAFTON/KORani-v1-13B/result_2023-10-17 13:34:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.3515358361774744, + "acc_norm_stderr": 0.013952413699600938 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39533957379008167, + "acc_stderr": 0.004879242848473461, + "acc_norm": 0.5114519020115514, + "acc_norm_stderr": 0.0049884724594180165 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.0398913985953177, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.0398913985953177 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2567049808429119, + "acc_stderr": 0.015620480263064533, + "acc_norm": 0.2567049808429119, + "acc_norm_stderr": 0.015620480263064533 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073465, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.03633384414073465 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.02895734278834235, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.02895734278834235 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.034605799075530276, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.034605799075530276 + }, 
+ "harness|ko_mmlu_philosophy|5": { + "acc": 0.24437299035369775, + "acc_stderr": 0.0244061620946689, + "acc_norm": 0.24437299035369775, + "acc_norm_stderr": 0.0244061620946689 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.22869955156950672, + "acc_stderr": 0.028188240046929196, + "acc_norm": 0.22869955156950672, + "acc_norm_stderr": 0.028188240046929196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18686868686868688, + "acc_stderr": 0.02777253333421899, + "acc_norm": 0.18686868686868688, + "acc_norm_stderr": 0.02777253333421899 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.03664666337225256, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.03664666337225256 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.035240689515674495, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.035240689515674495 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.026265024608275882, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.026265024608275882 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2076923076923077, + "acc_stderr": 0.020567539567246787, + "acc_norm": 0.2076923076923077, + "acc_norm_stderr": 0.020567539567246787 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1724137931034483, + "acc_stderr": 0.026577672183036572, + "acc_norm": 0.1724137931034483, + "acc_norm_stderr": 0.026577672183036572 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.20967741935483872, + "acc_stderr": 0.02315787934908353, + "acc_norm": 0.20967741935483872, + "acc_norm_stderr": 0.02315787934908353 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2863247863247863, + "acc_stderr": 0.02961432369045665, + "acc_norm": 0.2863247863247863, + "acc_norm_stderr": 0.02961432369045665 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21132075471698114, + "acc_stderr": 0.025125766484827845, + "acc_norm": 0.21132075471698114, + "acc_norm_stderr": 0.025125766484827845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072776, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072776 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.025644108639267645, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.025644108639267645 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008936, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008936 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.029929415408348377, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.029929415408348377 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21693121693121692, + "acc_stderr": 0.02122708244944504, + "acc_norm": 0.21693121693121692, + "acc_norm_stderr": 0.02122708244944504 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.02378620325550829, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.02378620325550829 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.033220157957767414, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.033220157957767414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25308641975308643, + "acc_stderr": 0.024191808600712992, + "acc_norm": 0.25308641975308643, + "acc_norm_stderr": 0.024191808600712992 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.029519282616817247, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.029519282616817247 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21100917431192662, + "acc_stderr": 0.017493922404112648, + "acc_norm": 0.21100917431192662, + "acc_norm_stderr": 0.017493922404112648 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.024954184324879905, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.024954184324879905 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516304, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.017986615304030312, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.017986615304030312 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, 
+ "acc_stderr": 0.025257861359432407, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432407 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.025416428388767474, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.025416428388767474 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961459, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961459 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.024398192986654924, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.024398192986654924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.0265370453121453, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.0265370453121453 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.011005971399927234, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.011005971399927234 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251735, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251735 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.19393939393939394, + "acc_stderr": 0.030874145136562097, + "acc_norm": 0.19393939393939394, + "acc_norm_stderr": 0.030874145136562097 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.0151274270965207, + "mc2": 0.40538205465914606, + "mc2_stderr": 0.01537488137847706 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3789846517119244, + "acc_stderr": 0.01667926068422928, + "acc_norm": 0.4734356552538371, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "KRAFTON/KORani-v1-13B", + "model_sha": "a699d0cebc4815f33854bc83065a03fc9008473c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/KRAFTON/KORani-v2-13B/result_2023-10-17 13:34:06.json b/KRAFTON/KORani-v2-13B/result_2023-10-17 13:34:06.json new file mode 100644 index 0000000000000000000000000000000000000000..a1f8e76463faca8844cda24f4d5dc7629b05dd18 --- /dev/null +++ b/KRAFTON/KORani-v2-13B/result_2023-10-17 13:34:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2960750853242321, + "acc_stderr": 0.013340916085246263, + "acc_norm": 0.3370307167235495, + "acc_norm_stderr": 0.013813476652902265 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35241983668591914, + "acc_stderr": 0.004767475366689779, + "acc_norm": 0.42252539334793865, + "acc_norm_stderr": 0.004929517011508216 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.037712831076265434, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.037712831076265434 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37547892720306514, + "acc_stderr": 0.01731661319718279, + "acc_norm": 0.37547892720306514, + "acc_norm_stderr": 0.01731661319718279 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 
0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.031068985963122145, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.031068985963122145 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.0362933532994786, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.0362933532994786 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4115755627009646, + "acc_stderr": 0.027950481494401266, + "acc_norm": 0.4115755627009646, + "acc_norm_stderr": 0.027950481494401266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929188, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929188 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33589743589743587, + "acc_stderr": 0.02394672474156397, + "acc_norm": 0.33589743589743587, + "acc_norm_stderr": 0.02394672474156397 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970187, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970187 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567107, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.026662010578567107 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5683760683760684, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.5683760683760684, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37358490566037733, + "acc_stderr": 0.029773082713319878, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.029773082713319878 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + 
"acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.42786069651741293, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.42786069651741293, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.035676037996391685, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.035676037996391685 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02141168439369418, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02141168439369418 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.40173410404624277, + "acc_stderr": 0.026394104177643634, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625658, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625658 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30642201834862387, + "acc_stderr": 0.019765517220458523, + "acc_norm": 0.30642201834862387, + "acc_norm_stderr": 0.019765517220458523 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791438, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791438 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.5041322314049587, + 
"acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.034597776068105365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.034597776068105365 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724553, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460994, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605617, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.02667925227010312, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.02667925227010312 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.39662447257383965, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.39662447257383965, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27640156453715775, + "acc_stderr": 0.011422153194553567, + "acc_norm": 0.27640156453715775, + "acc_norm_stderr": 0.011422153194553567 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087305, + "mc2": 0.44326975161880294, + "mc2_stderr": 0.015781962014868475 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31995277449822906, + "acc_stderr": 0.016037153840280538, + "acc_norm": 0.3955135773317591, + "acc_norm_stderr": 0.016810815902206042 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "KRAFTON/KORani-v2-13B", + "model_sha": "12dbb4046d3fabb3b64c3eab2ecc91faec1af9e9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/KRAFTON/KORani-v3-13B/result_2023-10-17 13:33:45.json b/KRAFTON/KORani-v3-13B/result_2023-10-17 13:33:45.json new file mode 100644 index 0000000000000000000000000000000000000000..01fee2eaf1fd7baf8f0b8fa8fd2f7034bfe7a7a7 --- /dev/null +++ b/KRAFTON/KORani-v3-13B/result_2023-10-17 13:33:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3046075085324232, + "acc_stderr": 0.01344952210993249, + "acc_norm": 0.34726962457337884, + "acc_norm_stderr": 0.013913034529620442 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3494323839872535, + "acc_stderr": 0.004758162967997396, + "acc_norm": 0.4313881696873133, + "acc_norm_stderr": 0.004942578520987348 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 
0.04846748253977239, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.04846748253977239 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3665389527458493, + "acc_stderr": 0.01723124462679705, + "acc_norm": 0.3665389527458493, + "acc_norm_stderr": 0.01723124462679705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501117, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501117 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610334, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610334 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.37942122186495175, + "acc_stderr": 0.027559949802347817, + "acc_norm": 0.37942122186495175, + "acc_norm_stderr": 0.027559949802347817 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185553, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185553 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.03163145807552378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34102564102564104, + "acc_stderr": 0.024035489676335044, + "acc_norm": 0.34102564102564104, + "acc_norm_stderr": 0.024035489676335044 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.02770935967503249, + "acc_norm": 0.3870967741935484, + 
"acc_norm_stderr": 0.02770935967503249 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5811965811965812, + "acc_stderr": 0.03232128912157792, + "acc_norm": 0.5811965811965812, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33584905660377357, + "acc_stderr": 0.029067220146644826, + "acc_norm": 0.33584905660377357, + "acc_norm_stderr": 0.029067220146644826 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276611, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276611 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43781094527363185, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.43781094527363185, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.0339175032232166, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.0339175032232166 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113935, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113935 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.027044538138402616, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.027044538138402616 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3174311926605505, + "acc_stderr": 0.0199571521984605, + "acc_norm": 0.3174311926605505, + "acc_norm_stderr": 0.0199571521984605 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.01877168389352817, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.01877168389352817 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.02746470844202213, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.02746470844202213 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605607, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605607 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859933, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859933 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.025035845227711254, + "acc_norm": 0.21691176470588236, + "acc_norm_stderr": 0.025035845227711254 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3924050632911392, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.3924050632911392, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.011759939618085451, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085451 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.03374499356319355, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.03374499356319355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834562, + "mc2": 0.44032476462099357, + "mc2_stderr": 0.015871156864559203 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29515938606847697, + "acc_stderr": 0.015681535229192186, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.01661661284322494 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "KRAFTON/KORani-v3-13B", + "model_sha": "d6479f9de126caf02a770e5e8db4524a0ccb4db7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/KT-AI/midm-bitext-S-7B-inst-v1/result_2023-10-30 15:41:34.json b/KT-AI/midm-bitext-S-7B-inst-v1/result_2023-10-30 15:41:34.json new file mode 100644 index 0000000000000000000000000000000000000000..b4f328e8b65821b1f50e57fd044887ad7f5c0868 --- /dev/null +++ b/KT-AI/midm-bitext-S-7B-inst-v1/result_2023-10-30 15:41:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.013847460518892981, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.4137621987651862, + "acc_stderr": 0.004915003499517832, + "acc_norm": 0.5510854411471818, + "acc_norm_stderr": 0.004963669199433383 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5593869731800766, + "acc_stderr": 0.01775339697390848, + "acc_norm": 0.5593869731800766, + "acc_norm_stderr": 0.01775339697390848 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.0424463323835323, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.0424463323835323 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956281, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956281 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.035094383488796295, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.035094383488796295 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 
0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.1962962962962963, + "acc_stderr": 0.024217421327417155, + "acc_norm": 0.1962962962962963, + "acc_norm_stderr": 0.024217421327417155 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.0236369759961018, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.0236369759961018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.02668013476167922, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.02668013476167922 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + 
"acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127155, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127155 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978252, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978252 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.019431775677037313, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.019431775677037313 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.03154696285656629, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.03154696285656629 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29608938547486036, + "acc_stderr": 0.015268677317602286, + "acc_norm": 0.29608938547486036, + "acc_norm_stderr": 0.015268677317602286 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.0304725260267265, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.0304725260267265 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31681877444589307, + "acc_stderr": 0.011882349954722997, + "acc_norm": 0.31681877444589307, + "acc_norm_stderr": 0.011882349954722997 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350194, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350194 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.18787878787878787, + "acc_stderr": 0.030501934059429144, + "acc_norm": 
0.18787878787878787, + "acc_norm_stderr": 0.030501934059429144 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589657, + "mc2": 0.4574707149506456, + "mc2_stderr": 0.015369860749341643 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5844155844155844, + "acc_stderr": 0.016943586313076575, + "acc_norm": 0.5997638724911453, + "acc_norm_stderr": 0.016844693510505052 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "KT-AI/midm-bitext-S-7B-inst-v1", + "model_sha": "88545caeab1463c83a15c23f5282cd8ea781dd0b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/LDCC-with-korca/result_2023-11-06 01:29:42.json 
b/Kaeri-Jenti/LDCC-with-korca/result_2023-11-06 01:29:42.json new file mode 100644 index 0000000000000000000000000000000000000000..1a35cc825d9cd29e265edc23c2dcf0a09c0e20bc --- /dev/null +++ b/Kaeri-Jenti/LDCC-with-korca/result_2023-11-06 01:29:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3993174061433447, + "acc_stderr": 0.014312094557946704, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.014537144444284738 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42202748456482775, + "acc_stderr": 0.004928735103635845, + "acc_norm": 0.5664210316669986, + "acc_norm_stderr": 0.004945558069852528 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5466155810983397, + "acc_stderr": 0.01780208713585031, + "acc_norm": 0.5466155810983397, + "acc_norm_stderr": 0.01780208713585031 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587952, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587952 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.03536085947529481, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.03536085947529481 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126177, + "acc_norm": 0.45384615384615384, + 
"acc_norm_stderr": 0.025242770987126177 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.03047144586718324, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.03047144586718324 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.02708037281514565, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.02708037281514565 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02391998416404774, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02391998416404774 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5669724770642202, + "acc_stderr": 0.021244146569074345, + "acc_norm": 0.5669724770642202, + "acc_norm_stderr": 0.021244146569074345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.01984828016840116, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.01984828016840116 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251458, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670736, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670736 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + 
"acc": 0.35723598435462844, + "acc_stderr": 0.012238615750316498, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.012238615750316498 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.4486998920807941, + "mc2_stderr": 0.015146223309438359 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.017159163590170213, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.016747577991642792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, 
+ "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/LDCC-with-korca", + "model_sha": "50bca191d06902b5359abb3b1007b8106eff41f6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/LDCC-with-openorca-and-korca/result_2023-11-06 11:07:08.json b/Kaeri-Jenti/LDCC-with-openorca-and-korca/result_2023-11-06 11:07:08.json new file mode 100644 index 0000000000000000000000000000000000000000..3d4cf313ced5cde007c2356b78329a12989d6351 --- /dev/null +++ b/Kaeri-Jenti/LDCC-with-openorca-and-korca/result_2023-11-06 11:07:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4035836177474403, + "acc_stderr": 0.014337158914268436, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 0.014553749939306864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42162915753833896, + "acc_stderr": 0.004928105880776079, + "acc_norm": 0.566122286397132, + "acc_norm_stderr": 0.004945956744943813 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5440613026819924, + "acc_stderr": 0.017810403925435342, + "acc_norm": 0.5440613026819924, + "acc_norm_stderr": 0.017810403925435342 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.2549019607843137, + "acc_stderr": 0.0433643270799318, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.0433643270799318 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.031804252043840985, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.031804252043840985 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776285, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776285 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982022, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982022 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282542, + 
"acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.03602573571288442, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.563302752293578, + "acc_stderr": 0.021264820158714205, + "acc_norm": 0.563302752293578, + "acc_norm_stderr": 0.021264820158714205 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891765, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891765 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635892, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635892 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536048, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536048 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335314, + "acc_norm": 
0.33455882352941174, + "acc_norm_stderr": 0.028661996202335314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.031996152328062875, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.031996152328062875 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.012319403369564642, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564642 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.448398942069094, + "mc2_stderr": 0.015159190515111855 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.01718832921965428, + "acc_norm": 0.6221959858323495, + "acc_norm_stderr": 0.01666908284069498 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/LDCC-with-openorca-and-korca", + "model_sha": "7f845005dc24e13e4fe380e32aa1b0f649b85743", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/LDCC-with-openorca/result_2023-11-05 09:08:43.json b/Kaeri-Jenti/LDCC-with-openorca/result_2023-11-05 09:08:43.json new file mode 100644 index 0000000000000000000000000000000000000000..47c5106c0172f0215bc1da826db315361bdfa3bd --- /dev/null +++ b/Kaeri-Jenti/LDCC-with-openorca/result_2023-11-05 09:08:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4044368600682594, + "acc_stderr": 0.014342036483436177, + "acc_norm": 0.46331058020477817, + "acc_norm_stderr": 0.014572000527756994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4217287392949612, + "acc_stderr": 0.0049282634946167326, + "acc_norm": 0.5668193586934873, + "acc_norm_stderr": 0.0049450236570322765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5389527458492975, + "acc_stderr": 0.017825621793239002, + "acc_norm": 0.5389527458492975, + "acc_norm_stderr": 0.017825621793239002 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, 
+ "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009794, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009794 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 
0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602592, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389177, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389177 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.02878222756134724, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.02878222756134724 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553974, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 
0.014508979453553974 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763125, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763125 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36114732724902215, + "acc_stderr": 0.012267935477519032, + "acc_norm": 0.36114732724902215, + "acc_norm_stderr": 0.012267935477519032 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.44749695382551585, + "mc2_stderr": 0.015166538817685267 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998567, + "acc_norm": 0.6162927981109799, + "acc_norm_stderr": 0.016718924637231826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/LDCC-with-openorca", + "model_sha": "f2d0734b7c42df6a4c4cd53aed9f5950b28a7546", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/LDCC-with-openorca2/result_2023-11-07 14:53:29.json b/Kaeri-Jenti/LDCC-with-openorca2/result_2023-11-07 14:53:29.json new file mode 100644 index 0000000000000000000000000000000000000000..8c84374cfd35fbc04951c9ae13067f2361f46f12 --- /dev/null +++ b/Kaeri-Jenti/LDCC-with-openorca2/result_2023-11-07 14:53:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4044368600682594, + "acc_stderr": 0.014342036483436177, + "acc_norm": 0.46245733788395904, + "acc_norm_stderr": 0.014570144495075576 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42182832105158335, + "acc_stderr": 0.004928420903026553, + "acc_norm": 0.5668193586934873, + "acc_norm_stderr": 0.0049450236570322765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.541507024265645, + "acc_stderr": 0.017818248603465585, + "acc_norm": 0.541507024265645, + "acc_norm_stderr": 0.017818248603465585 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 
0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.03193705726200293, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.03193705726200293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.03028500925900979, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.03028500925900979 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + 
}, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518026, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518026 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048487, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.02339382650048487 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602592, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + 
"acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553974, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553974 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681417, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301847, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36114732724902215, + "acc_stderr": 0.01226793547751903, + "acc_norm": 0.36114732724902215, + "acc_norm_stderr": 0.01226793547751903 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4478974507988722, + "mc2_stderr": 0.015169839199333743 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4817001180637544, + "acc_stderr": 0.017178836639177745, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.016738130760321757 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/LDCC-with-openorca2", + "model_sha": "e0f7d47f657361c2fffd4a67428b5ab523b84261", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/Llama-2-kor-13B/result_2023-11-24 00:19:01.json b/Kaeri-Jenti/Llama-2-kor-13B/result_2023-11-24 00:19:01.json new file mode 100644 index 0000000000000000000000000000000000000000..7e3013c8878c12ffe672f09f2aec6a0daabd3d34 --- /dev/null +++ b/Kaeri-Jenti/Llama-2-kor-13B/result_2023-11-24 00:19:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3984641638225256, + "acc_stderr": 0.014306946052735565, + "acc_norm": 0.45819112627986347, + "acc_norm_stderr": 0.0145602203087147 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4231228838876718, + "acc_stderr": 0.004930448527146665, + "acc_norm": 0.5640310695080661, + "acc_norm_stderr": 0.004948696280312416 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5542784163473818, + "acc_stderr": 0.017774297282479503, + "acc_norm": 0.5542784163473818, + "acc_norm_stderr": 0.017774297282479503 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.0251418015111775, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.0251418015111775 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681848, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681848 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413866, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413866 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282532 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5706422018348624, + "acc_stderr": 0.021222286397236508, + "acc_norm": 0.5706422018348624, + "acc_norm_stderr": 0.021222286397236508 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147127, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147127 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.027996723180631466, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.027996723180631466 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + 
"acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.019610851474880283, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.019610851474880283 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933105, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34028683181225555, + "acc_stderr": 0.012101217610223798, + "acc_norm": 0.34028683181225555, + "acc_norm_stderr": 0.012101217610223798 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.01605899902610062, + "mc2": 0.45367177115043, + "mc2_stderr": 0.015134250403335572 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43919716646989376, + "acc_stderr": 0.0170627757447807, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/Llama-2-kor-13B", + "model_sha": "de4f458a28b96221babb7655c994221ea3d27c6f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/llama-2-koen-13b-v1.2/result_2023-11-09 02:00:27.json b/Kaeri-Jenti/llama-2-koen-13b-v1.2/result_2023-11-09 02:00:27.json new file mode 100644 index 0000000000000000000000000000000000000000..19fbb2367aa42f1539eeca24b1c2795266ec661f --- /dev/null +++ b/Kaeri-Jenti/llama-2-koen-13b-v1.2/result_2023-11-09 02:00:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3890784982935154, + "acc_stderr": 0.014247309976045607, + "acc_norm": 0.45819112627986347, + "acc_norm_stderr": 0.014560220308714702 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4206333399721171, + "acc_stderr": 0.004926518439372259, + "acc_norm": 0.5676160127464649, + "acc_norm_stderr": 0.004943945069611462 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.043182754919779756, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.043182754919779756 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280458, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5811965811965812, + 
"acc_stderr": 0.03232128912157792, + "acc_norm": 0.5811965811965812, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138938, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138938 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.021436420955529424, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.021436420955529424 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 
0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.01943177567703731, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.01943177567703731 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859672, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859672 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.011759939618085455, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085455 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766368, + "mc2": 0.4100851120970672, + "mc2_stderr": 0.014797143070922393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5029515938606848, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.5914994096812278, + "acc_norm_stderr": 0.016900062879427125 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/llama-2-koen-13b-v1.2", + "model_sha": "cb9e8ff37d427ab588d666b5c6994498a10084de", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/llama-2-koen-13b-v1.3/result_2023-11-27 00:37:09.json b/Kaeri-Jenti/llama-2-koen-13b-v1.3/result_2023-11-27 00:37:09.json new file mode 100644 index 0000000000000000000000000000000000000000..494045527def032eb4a8f4d6f221357de8e506cc --- /dev/null +++ b/Kaeri-Jenti/llama-2-koen-13b-v1.3/result_2023-11-27 00:37:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000326, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633832 + }, + "harness|ko_hellaswag|10": { + 
"acc": 0.4208325034853615, + "acc_stderr": 0.004926837572202162, + "acc_norm": 0.563433578968333, + "acc_norm_stderr": 0.00494946256368134 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.017855434554041993, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.017855434554041993 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.02517404838400078, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.02517404838400078 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 
0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342596, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422704, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + 
"acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5431192660550459, + "acc_stderr": 0.02135745878522621, + "acc_norm": 0.5431192660550459, + "acc_norm_stderr": 0.02135745878522621 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392868, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392868 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.019249785691717217, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.019249785691717217 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289784, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289784 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.011759939618085455, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085455 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + 
"acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842883, + "mc2": 0.4216743604441881, + "mc2_stderr": 0.014868064514296196 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45454545454545453, + "acc_stderr": 0.017119172208061504, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/llama-2-koen-13b-v1.3", + "model_sha": "a926510aca20383788b1d49fc2a16edac5919f2c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/llama-2-koen-13b-with-ko-wiki/result_2023-11-08 11:54:25.json 
b/Kaeri-Jenti/llama-2-koen-13b-with-ko-wiki/result_2023-11-08 11:54:25.json new file mode 100644 index 0000000000000000000000000000000000000000..882ada039b0ae5be244fcb0af16f5494750bb6f6 --- /dev/null +++ b/Kaeri-Jenti/llama-2-koen-13b-with-ko-wiki/result_2023-11-08 11:54:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.363481228668942, + "acc_stderr": 0.014056207319068285, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580122 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3769169488149771, + "acc_stderr": 0.004836234143655416, + "acc_norm": 0.498406691894045, + "acc_norm_stderr": 0.004989756076956349 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.017879948914431672, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.017879948914431672 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.0316314580755238, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.0316314580755238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 
0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.02779187875313227, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.02779187875313227 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.02983280811479601, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.02983280811479601 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.046737523336702384, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.046737523336702384 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.037143259063020635, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.037143259063020635 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.026817718130348913, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.026817718130348913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + 
"acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.03499807276193338, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.03499807276193338 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41651376146788993, + "acc_stderr": 0.021136376504030874, + "acc_norm": 0.41651376146788993, + "acc_norm_stderr": 0.021136376504030874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35130718954248363, + "acc_stderr": 0.019312676065786554, + "acc_norm": 0.35130718954248363, + "acc_norm_stderr": 0.019312676065786554 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101366, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101366 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510934, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510934 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.02866199620233531, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.02866199620233531 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.03181425118197787, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.03181425118197787 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.03254462010767859, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.03254462010767859 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3005215123859192, + "acc_stderr": 0.011709918883039124, + "acc_norm": 0.3005215123859192, + "acc_norm_stderr": 0.011709918883039124 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.033086111132364336, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.033086111132364336 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22888616891064872, + "mc1_stderr": 0.014706994909055027, + "mc2": 0.3806541455843524, + "mc2_stderr": 0.014913115418195339 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3990554899645809, + "acc_stderr": 0.016836377292849296, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.01717567127983645 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/llama-2-koen-13b-with-ko-wiki", + "model_sha": "c04aefa73af3678c5fd2df2750199589dbec1216", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Korabbit/llama-2-ko-7b-bilingual/result_2023-10-26 04:30:26.json b/Korabbit/llama-2-ko-7b-bilingual/result_2023-10-26 04:30:26.json new file mode 100644 index 0000000000000000000000000000000000000000..22add5fa0e2bf69dcc18090af66bc81bf8f2fa2e --- /dev/null +++ b/Korabbit/llama-2-ko-7b-bilingual/result_2023-10-26 04:30:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3387372013651877, + "acc_stderr": 0.013830568927974332, + "acc_norm": 0.40017064846416384, + "acc_norm_stderr": 0.014317197787809174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4023102967536347, + "acc_stderr": 0.004893617014975314, + "acc_norm": 0.5194184425413264, + "acc_norm_stderr": 0.004986016938678532 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.421455938697318, + "acc_stderr": 0.017657976412654857, + "acc_norm": 0.421455938697318, + "acc_norm_stderr": 0.017657976412654857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231004, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3987138263665595, + "acc_stderr": 0.0278093225857745, + "acc_norm": 0.3987138263665595, + "acc_norm_stderr": 0.0278093225857745 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.043171711948702535, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.043171711948702535 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3787878787878788, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.3787878787878788, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + 
"acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30256410256410254, + "acc_stderr": 0.023290888053772725, + "acc_norm": 0.30256410256410254, + "acc_norm_stderr": 0.023290888053772725 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233484, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233484 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534327, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534327 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5427350427350427, + "acc_stderr": 0.03263622596380688, + "acc_norm": 0.5427350427350427, + "acc_norm_stderr": 0.03263622596380688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.02944517532819959, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.02944517532819959 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360383, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360383 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339193, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339193 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.034140140070440354, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.034140140070440354 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3439306358381503, + "acc_stderr": 0.025574123786546672, + "acc_norm": 0.3439306358381503, + "acc_norm_stderr": 0.025574123786546672 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935575, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935575 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3395061728395062, + "acc_stderr": 0.026348564412011628, + "acc_norm": 0.3395061728395062, + "acc_norm_stderr": 0.026348564412011628 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.37305699481865284, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3706422018348624, + "acc_stderr": 0.02070745816435298, + "acc_norm": 0.3706422018348624, + "acc_norm_stderr": 0.02070745816435298 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.027245613047215355, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.027245613047215355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.018975427920507222, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.018975427920507222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012393, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012393 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_professional_medicine|5": { + 
"acc": 0.2647058823529412, + "acc_stderr": 0.026799562024887678, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.026799562024887678 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2897959183673469, + "acc_stderr": 0.02904308868330433, + "acc_norm": 0.2897959183673469, + "acc_norm_stderr": 0.02904308868330433 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45147679324894513, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.45147679324894513, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698602, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698602 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.01517698502770769, + "mc2": 0.41272169126715796, + "mc2_stderr": 0.015689006867142138 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4332939787485242, + "acc_stderr": 0.0170366836418931, + "acc_norm": 0.5112160566706021, + "acc_norm_stderr": 0.017186028469489294 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Korabbit/llama-2-ko-7b-bilingual", + "model_sha": "3182f7a997a431a53f9157384c6fb742619f8fbc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Korabbit/llama-2-ko-7b-pru/result_2023-11-05 04:27:01.json b/Korabbit/llama-2-ko-7b-pru/result_2023-11-05 04:27:01.json new file mode 100644 index 0000000000000000000000000000000000000000..80245eb4b25442e46f683e1b6e7dfd096e64eb6e --- /dev/null +++ b/Korabbit/llama-2-ko-7b-pru/result_2023-11-05 04:27:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.22098976109215018, + "acc_stderr": 0.01212492920681826, + "acc_norm": 0.2790102389078498, + "acc_norm_stderr": 0.013106784883601346 + }, + "harness|ko_hellaswag|10": { + "acc": 0.27703644692292373, + "acc_stderr": 0.004466200055292544, + "acc_norm": 0.3209520015933081, + "acc_norm_stderr": 0.004658882929099516 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.18128654970760233, + "acc_stderr": 0.029547741687640024, + "acc_norm": 0.18128654970760233, + "acc_norm_stderr": 0.029547741687640024 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20945083014048532, + "acc_stderr": 0.014551310568143698, + "acc_norm": 0.20945083014048532, + "acc_norm_stderr": 0.014551310568143698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.027678452578212377, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.027678452578212377 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789396, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789396 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11210762331838565, + "acc_stderr": 0.0211748942063461, + "acc_norm": 0.11210762331838565, + "acc_norm_stderr": 0.0211748942063461 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847836, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847836 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.048108401480826346, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.048108401480826346 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36923076923076925, + "acc_stderr": 0.024468615241478912, + "acc_norm": 0.36923076923076925, + "acc_norm_stderr": 0.024468615241478912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20512820512820512, + "acc_stderr": 0.02645350805404033, + "acc_norm": 0.20512820512820512, + "acc_norm_stderr": 0.02645350805404033 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.02863723563980092, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.02863723563980092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.034564257450869995, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.034564257450869995 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633335 + 
}, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526501, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.02402774515526501 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.02357688174400572, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400572 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.03499807276193338, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.03499807276193338 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3486238532110092, + "acc_stderr": 0.020431254090714328, + "acc_norm": 0.3486238532110092, + "acc_norm_stderr": 0.020431254090714328 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.02625605383571896, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.02625605383571896 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.03520893951097653, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.03520893951097653 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926605, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.016774672365468517, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.016774672365468517 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953776, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953776 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.01099615663514269, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.01099615663514269 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.029554292605695053, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.029554292605695053 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557961, + "mc2": 0.45977172073584577, + "mc2_stderr": 0.01635863808501988 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.23494687131050768, + "acc_stderr": 0.014576237948550175, + "acc_norm": 0.32585596221959856, + "acc_norm_stderr": 0.016114023894800326 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Korabbit/llama-2-ko-7b-pru", + "model_sha": "cc3233e29e5358863df06a27bc23e1f07ed994e5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Korabbit/llama-2-ko-7b/result_2023-11-05 04:27:18.json b/Korabbit/llama-2-ko-7b/result_2023-11-05 04:27:18.json new file mode 100644 index 0000000000000000000000000000000000000000..671268eaf4a045a8c8b73bd7c4b7d76418306c8d --- /dev/null +++ b/Korabbit/llama-2-ko-7b/result_2023-11-05 04:27:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2354948805460751, + "acc_stderr": 0.012399451855004748, + "acc_norm": 0.2713310580204778, + "acc_norm_stderr": 0.012993807727545792 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28649671380203146, + "acc_stderr": 0.004512002459757949, + "acc_norm": 0.3359888468432583, + "acc_norm_stderr": 0.004713696694131676 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3269476372924649, + "acc_stderr": 0.016774908180131463, + "acc_norm": 0.3269476372924649, + "acc_norm_stderr": 0.016774908180131463 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.02951319662553935, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.02951319662553935 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 
0.026730620728004917, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004917 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.03191100192835794, + "acc_norm": 0.3452914798206278, + "acc_norm_stderr": 0.03191100192835794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728743, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728743 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416543, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416543 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136084, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136084 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2743589743589744, + "acc_stderr": 0.022622765767493214, + "acc_norm": 0.2743589743589744, + "acc_norm_stderr": 0.022622765767493214 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280458, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.02748054188795359, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.02748054188795359 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.41452991452991456, + "acc_stderr": 0.03227396567623778, + "acc_norm": 0.41452991452991456, + "acc_norm_stderr": 0.03227396567623778 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3132075471698113, + "acc_stderr": 0.02854479331905533, + "acc_norm": 0.3132075471698113, + "acc_norm_stderr": 0.02854479331905533 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541053, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541053 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945287, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945287 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3482587064676617, + "acc_stderr": 0.033687874661154596, 
+ "acc_norm": 0.3482587064676617, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624576, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624576 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924034, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924034 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.02577311116963045, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.02577311116963045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24587155963302754, + "acc_stderr": 0.018461940968708457, + "acc_norm": 0.24587155963302754, + "acc_norm_stderr": 0.018461940968708457 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046637, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046637 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.018433427649401903, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.018433427649401903 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.0271871270115038, + "acc_norm": 0.29432624113475175, + 
"acc_norm_stderr": 0.0271871270115038 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293649, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293649 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.023529242185193106, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.023529242185193106 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26597131681877445, + "acc_stderr": 0.011285033165551286, + "acc_norm": 0.26597131681877445, + "acc_norm_stderr": 0.011285033165551286 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.0364620496325381, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.0364620496325381 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237038, + "mc2": 0.4626002465688359, + "mc2_stderr": 0.016132004385948653 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.23376623376623376, + "acc_stderr": 0.01455078258710312, + "acc_norm": 0.3234946871310508, + "acc_norm_stderr": 0.016083627290483675 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 
1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Korabbit/llama-2-ko-7b", + "model_sha": "781e10378a374e3d2ecc7eaa71ffad5d912cc040", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Korabbit/my_model/result_2023-10-24 09:04:19.json b/Korabbit/my_model/result_2023-10-24 09:04:19.json new file mode 100644 index 0000000000000000000000000000000000000000..632575a8526a9fc49f3670d055e32dafd46c7097 --- /dev/null +++ b/Korabbit/my_model/result_2023-10-24 09:04:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.013640943091946524, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759095 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38767177853017326, + "acc_stderr": 0.004862232790041553, + "acc_norm": 0.5120493925512846, + "acc_norm_stderr": 0.004988332289642081 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161549, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161549 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.42528735632183906, + "acc_stderr": 0.017679225489431447, + "acc_norm": 0.42528735632183906, + "acc_norm_stderr": 0.017679225489431447 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.02802022627120022, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.02802022627120022 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.0362933532994786, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.0362933532994786 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.37942122186495175, + "acc_stderr": 0.02755994980234781, + "acc_norm": 0.37942122186495175, + "acc_norm_stderr": 0.02755994980234781 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168264, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.03383201223244443, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.03383201223244443 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.039417076320648906, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.039417076320648906 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.037932811853078084, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.037932811853078084 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204426, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204426 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.32903225806451614, + "acc_stderr": 0.026729499068349972, + "acc_norm": 0.32903225806451614, + "acc_norm_stderr": 0.026729499068349972 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5341880341880342, + "acc_stderr": 0.03267942734081228, + "acc_norm": 0.5341880341880342, + "acc_norm_stderr": 0.03267942734081228 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.02983280811479601, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.02983280811479601 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.417910447761194, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.417910447761194, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.034564257450869995, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.034564257450869995 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536934, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536934 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.02590663263101613, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.02590663263101613 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02622964917882116, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02622964917882116 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089116, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089116 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.03712454853721368, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.03712454853721368 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3376146788990826, + "acc_stderr": 0.020275265986638914, + "acc_norm": 0.3376146788990826, + "acc_norm_stderr": 0.020275265986638914 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.0275300784471103, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.0275300784471103 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.045604560863872344, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.045604560863872344 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351586, + 
"acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351586 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355445, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355445 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005344, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005344 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260664, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260664 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.02747227447323382, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.02747227447323382 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3551020408163265, + "acc_stderr": 0.030635655150387638, + "acc_norm": 0.3551020408163265, + "acc_norm_stderr": 0.030635655150387638 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29139504563233376, + "acc_stderr": 0.011605720214257605, + "acc_norm": 0.29139504563233376, + "acc_norm_stderr": 0.011605720214257605 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715008, + "mc2": 0.396242471455397, + "mc2_stderr": 0.01500796953934626 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3825265643447462, + "acc_stderr": 0.016709165387228817, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.017189767032130824 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Korabbit/my_model", + "model_sha": "4e31f162c656d46d38fb785707b02628c5ef5965", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.0/result_2023-10-29 22:16:00.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.0/result_2023-10-29 22:16:00.json new file mode 100644 index 0000000000000000000000000000000000000000..b8215aafe7269fe4986aa3958b599fea74422df3 --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.0/result_2023-10-29 22:16:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910471, + "acc_norm": 0.44880546075085326, + "acc_norm_stderr": 0.014534599585097667 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4251145190201155, + "acc_stderr": 0.0049335002616835944, + "acc_norm": 0.5650268870742879, + "acc_norm_stderr": 0.004947402907996247 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.5606641123882503, + "acc_stderr": 0.0177478742456836, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.0177478742456836 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933914, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933914 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + 
"acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6018348623853211, + "acc_stderr": 0.02098798942265426, + "acc_norm": 0.6018348623853211, + "acc_norm_stderr": 0.02098798942265426 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171566, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 
0.028491993586171566 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296559, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296559 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354164, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354164 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.03018753206032938, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.03018753206032938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763127, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763127 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.012238615750316503, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.012238615750316503 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.01600265148736101, + "mc2": 0.45982516329816536, + "mc2_stderr": 0.014857750246900359 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46871310507674147, + "acc_stderr": 0.017156666859785463, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.0", + "model_sha": "9d704abe2ee490446c4bea6a94692841bdb92ddb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.1/result_2023-10-30 14:08:29.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.1/result_2023-10-30 14:08:29.json new file mode 100644 index 0000000000000000000000000000000000000000..de7646b43ee622919257a2307e0bc94874f49f21 --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.1/result_2023-10-30 14:08:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3993174061433447, + "acc_stderr": 0.014312094557946704, + "acc_norm": 0.4658703071672355, + "acc_norm_stderr": 0.014577311315231099 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42561242780322645, + "acc_stderr": 
0.00493425039087978, + "acc_norm": 0.5683130850428202, + "acc_norm_stderr": 0.004942990623131125 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5568326947637292, + "acc_stderr": 0.017764085035348418, + "acc_norm": 0.5568326947637292, + "acc_norm_stderr": 0.017764085035348418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.03536085947529482, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.03536085947529482 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 
0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073835, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073835 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 
+ }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5963302752293578, + "acc_stderr": 0.02103570485657497, + "acc_norm": 0.5963302752293578, + "acc_norm_stderr": 0.02103570485657497 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141107, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141107 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647206, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647206 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003472, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003472 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35919165580182527, + "acc_stderr": 0.01225338618758425, + "acc_norm": 0.35919165580182527, + "acc_norm_stderr": 0.01225338618758425 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { 
+ "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237033, + "mc2": 0.42117238466385504, + "mc2_stderr": 0.01460128908268072 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727637, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.017107618859549346 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.1", + "model_sha": "48bfd4b2fa3fbb12ba5cf4a7b07195f65c998aa7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.2/result_2023-10-31 01:25:10.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.2/result_2023-10-31 01:25:10.json new file mode 100644 index 
0000000000000000000000000000000000000000..fa09deb23d91d236cefd1aa6ea4f7bc29801aca8 --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.2/result_2023-10-31 01:25:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40017064846416384, + "acc_stderr": 0.014317197787809169, + "acc_norm": 0.45307167235494883, + "acc_norm_stderr": 0.014546892052005628 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4222266480780721, + "acc_stderr": 0.0049290484827604515, + "acc_norm": 0.566620195180243, + "acc_norm_stderr": 0.004945291270072434 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5389527458492975, + "acc_stderr": 0.01782562179323901, + "acc_norm": 0.5389527458492975, + "acc_norm_stderr": 0.01782562179323901 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.03068302084323101, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.03068302084323101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.035594435655639196, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.035594435655639196 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.025088301454694834, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.025088301454694834 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5854700854700855, + "acc_stderr": 0.03227396567623779, + "acc_norm": 0.5854700854700855, + "acc_norm_stderr": 0.03227396567623779 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376896, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376896 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670788, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670788 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377906, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, 
+ "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.0211624200482735, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.0211624200482735 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486635, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.01952431674486635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.02872386385328128, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.02872386385328128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.03027332507734575, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.03027332507734575 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.03190080389473236, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.03190080389473236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3670143415906128, + "acc_stderr": 0.01231026424484213, + "acc_norm": 
0.3670143415906128, + "acc_norm_stderr": 0.01231026424484213 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606785, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606785 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.01541524174023703, + "mc2": 0.41898060116595187, + "mc2_stderr": 0.014731537822096375 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.016689333596980094 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": 
"LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.2", + "model_sha": "01fd24c5633c041fc150a92d285b67a58aa42d1b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.3/result_2023-10-31 15:28:25.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.3/result_2023-10-31 15:28:25.json new file mode 100644 index 0000000000000000000000000000000000000000..639da7d8276778a1ac22c3f512ecc31d80265dcf --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.3/result_2023-10-31 15:28:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.014383915302225398, + "acc_norm": 0.46245733788395904, + "acc_norm_stderr": 0.014570144495075574 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42780322644891455, + "acc_stderr": 0.004937490199489467, + "acc_norm": 0.5748854809798845, + "acc_norm_stderr": 0.004933500261683598 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.01778403453499242, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.01778403453499242 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + 
"acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.033764582465095665, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.033764582465095665 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + 
"acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5559633027522936, + "acc_stderr": 0.021302621211654518, + "acc_norm": 0.5559633027522936, + "acc_norm_stderr": 0.021302621211654518 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569746, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 
0.029812630701569746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.031722950043323296, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3604954367666232, + "acc_stderr": 0.012263110237299235, + "acc_norm": 0.3604954367666232, + "acc_norm_stderr": 0.012263110237299235 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006514, + "mc2": 0.43540892594005776, + "mc2_stderr": 0.014927422551655146 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45454545454545453, + "acc_stderr": 0.0171191722080615, + "acc_norm": 0.5985832349468713, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.3", + "model_sha": "3eabc4d02efc859940fd78f95895c10376edfbae", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.4/result_2023-11-01 02:12:17.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.4/result_2023-11-01 02:12:17.json new file mode 100644 index 0000000000000000000000000000000000000000..d738434070d3e7d4388c9eeb42911a943c53232c --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.4/result_2023-11-01 02:12:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43600682593856654, + "acc_stderr": 0.014491225699230914, + "acc_norm": 0.4778156996587031, + "acc_norm_stderr": 0.014597001927076133 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4395538737303326, + "acc_stderr": 0.004953184534223989, + "acc_norm": 0.5835490938060147, + "acc_norm_stderr": 0.0049196263806455115 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865636, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865636 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + 
"acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056126, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056126 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.02520357177302834, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.02520357177302834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836928, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 
0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282532 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5908256880733945, + "acc_stderr": 0.021080670264433738, + "acc_norm": 0.5908256880733945, + "acc_norm_stderr": 0.021080670264433738 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424513, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424513 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289784, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289784 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 
0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.012223623364044043, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.012223623364044043 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.36964504283965727, + "mc1_stderr": 0.0168981807069739, + "mc2": 0.5205477409426235, + "mc2_stderr": 0.01592635844376339 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4817001180637544, + "acc_stderr": 0.017178836639177752, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.4", + "model_sha": "784a5488ff350bcd9fde9d7aff59a0b9988acc2a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.5/result_2023-11-13 07:24:03.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.5/result_2023-11-13 07:24:03.json new file mode 100644 index 0000000000000000000000000000000000000000..e0037af29c65dfe14a16d6cf13be19100ad41749 --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.5/result_2023-11-13 07:24:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4274744027303754, + "acc_stderr": 0.01445686294465065, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.014600132075947096 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42949611631149176, + "acc_stderr": 0.004939925958728876, + "acc_norm": 0.5841465843457478, + "acc_norm_stderr": 0.004918612098944032 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5542784163473818, + "acc_stderr": 0.017774297282479503, + "acc_norm": 0.5542784163473818, + "acc_norm_stderr": 0.017774297282479503 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.0283332771095628, + "acc_norm": 0.5337620578778135, + 
"acc_norm_stderr": 0.0283332771095628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.025158266016868543, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.025158266016868543 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.0478200179138006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 
0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.021136376504030874, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.021136376504030874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928724, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928724 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924806, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0321495214780275, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0321495214780275 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34485006518904826, + "acc_stderr": 0.012139881006287058, + "acc_norm": 0.34485006518904826, + "acc_norm_stderr": 0.012139881006287058 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.038881769216741, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.038881769216741 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31456548347613217, + "mc1_stderr": 0.01625524199317919, + "mc2": 0.46079664403216586, + "mc2_stderr": 0.01602078431352231 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38016528925619836, + "acc_stderr": 0.016689333596980112, + "acc_norm": 0.4510035419126328, + "acc_norm_stderr": 0.017107618859549357 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, 
+ "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.5", + "model_sha": "0f783e7c9985f2de65016f414ad026ca7da56ad4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.6/result_2023-11-13 07:25:43.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.6/result_2023-11-13 07:25:43.json new file mode 100644 index 0000000000000000000000000000000000000000..c7e09c62e8737b3bcf4df84008b39ec563ff8d2a --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.6/result_2023-11-13 07:25:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4308873720136519, + "acc_stderr": 0.01447113339264246, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.0146001320759471 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43328022306313485, + "acc_stderr": 0.004945157565218188, + "acc_norm": 0.5933081059549891, + "acc_norm_stderr": 0.004902125388002201 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5593869731800766, + "acc_stderr": 0.017753396973908493, + "acc_norm": 0.5593869731800766, + "acc_norm_stderr": 0.017753396973908493 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 
0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.03544132491947969, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.03544132491947969 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.025158266016868547, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.025158266016868547 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + 
"acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.5944954128440367, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.01965992249362333, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.01965992249362333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.032259413526312945, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.032259413526312945 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163907, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163907 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3435462842242503, + "acc_stderr": 0.012128961174190154, + "acc_norm": 0.3435462842242503, + "acc_norm_stderr": 0.012128961174190154 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32802937576499386, + "mc1_stderr": 0.01643563293281504, + "mc2": 0.46940366768411657, + "mc2_stderr": 0.016167620517601608 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41912632821723733, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.4805194805194805, + "acc_norm_stderr": 0.01717730199234256 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, 
+ "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.6", + "model_sha": "8ca05731176451a126cf07e06a97f08e735e21b4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.7/result_2023-12-11 07:43:12.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.7/result_2023-12-11 07:43:12.json new file mode 100644 index 0000000000000000000000000000000000000000..373637bb7b42b89901d13984e81443cba535e498 --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.7/result_2023-12-11 07:43:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41638225255972694, + "acc_stderr": 0.014405618279436178, + "acc_norm": 0.48378839590443684, + "acc_norm_stderr": 0.014603708567414936 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4486158135829516, + "acc_stderr": 0.004963362085275563, + "acc_norm": 0.6018721370244972, + "acc_norm_stderr": 0.00488511646555028 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5568326947637292, + "acc_stderr": 0.01776408503534842, + "acc_norm": 
0.5568326947637292, + "acc_norm_stderr": 0.01776408503534842 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840622, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.025230381238934833, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.025230381238934833 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688166, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688166 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.037657466938651504, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.037657466938651504 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047732, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047732 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206184, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206184 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.021136376504030868, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.021136376504030868 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685741, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685741 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878645, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878645 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.01446589382985994, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.01446589382985994 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763127, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763127 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.03048603938910531, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.03048603938910531 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353593, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353593 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.038154943086889305, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.038154943086889305 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33047735618115054, + "mc1_stderr": 0.016466769613698296, + "mc2": 0.5033109991126061, + "mc2_stderr": 0.015408807692069393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.500590318772137, + "acc_stderr": 0.01719034212344859, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.7", + "model_sha": "da2fe170b8fa2c32b922b10cc1f21e74e7fb2395", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-16B/result_2023-12-31 10:15:24.json b/LDCC/LDCC-Instruct-Llama-2-ko-16B/result_2023-12-31 10:15:24.json new file mode 100644 index 0000000000000000000000000000000000000000..1954fa4c4011369adcf850526adb646ece2e5923 --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-16B/result_2023-12-31 10:15:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41552901023890787, + "acc_stderr": 0.014401366641216386, + "acc_norm": 0.492320819112628, + "acc_norm_stderr": 0.014609667440892567 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44214299940250945, + "acc_stderr": 0.004956262919324406, + "acc_norm": 0.6040629356701852, + "acc_norm_stderr": 0.0048805154313231605 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479637, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479637 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376556, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376556 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883233, + 
"acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883233 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.031426169937919246, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.031426169937919246 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342596, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.02686462436675665, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.02686462436675665 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5216049382716049, + "acc_stderr": 0.02779476010500873, + "acc_norm": 0.5216049382716049, + "acc_norm_stderr": 0.02779476010500873 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + "acc_stderr": 0.03490205592048573, + "acc_norm": 0.6269430051813472, + "acc_norm_stderr": 0.03490205592048573 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.021233365030319567, + "acc_norm": 
0.5688073394495413, + "acc_norm_stderr": 0.021233365030319567 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.02000791273935935, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.02000791273935935 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.044642857142857116, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.044642857142857116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260659, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260659 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841197, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841197 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03068582059661081, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03068582059661081 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.01219814060535359, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.01219814060535359 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34761321909424725, + "mc1_stderr": 0.016670769188897306, + "mc2": 
0.5104539931249092, + "mc2_stderr": 0.01608799028808744 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.5159386068476978, + "acc_norm_stderr": 0.017181617837190192 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-16B", + "model_sha": "f7a3f41bb36b1e9b9d894512aa266fd30d4b5298", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.1/result_2023-11-13 12:27:55.json b/LI-ST/Mistral-7B-ko-v0.1/result_2023-11-13 12:27:55.json new file mode 100644 index 0000000000000000000000000000000000000000..3654de047fbe80d249f4268c0708703e91d11697 --- /dev/null +++ 
b/LI-ST/Mistral-7B-ko-v0.1/result_2023-11-13 12:27:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470137, + "acc_norm": 0.4087030716723549, + "acc_norm_stderr": 0.014365750345427001 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3648675562636925, + "acc_stderr": 0.004804091708812546, + "acc_norm": 0.4743079067914758, + "acc_norm_stderr": 0.0049831897112085155 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010591, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.02832032583010591 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.024666744915187232, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.024666744915187232 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + 
"acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849738, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849738 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173092, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173092 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587192, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587192 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269955, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269955 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.02855582751652879, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.02855582751652879 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475363, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475363 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398864, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.032002553478937816, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.032002553478937816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849648, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849648 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.465162397472841, + "mc2_stderr": 0.015592055613780503 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3919716646989374, + "acc_stderr": 0.016784332119424084, + "acc_norm": 0.4781582054309327, + "acc_norm_stderr": 0.017173944474294375 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.1", + "model_sha": 
"6a56df13013ea478d88ef7b77fde53f594bf1e8d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.10/result_2023-12-15 01:40:06.json b/LI-ST/Mistral-7B-ko-v0.10/result_2023-12-15 01:40:06.json new file mode 100644 index 0000000000000000000000000000000000000000..c44fe30c83739a74a67796caa690e7b61e9912c0 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.10/result_2023-12-15 01:40:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31399317406143346, + "acc_stderr": 0.013562691224726291, + "acc_norm": 0.35409556313993173, + "acc_norm_stderr": 0.013975454122756553 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36068512248556067, + "acc_stderr": 0.004792179052583444, + "acc_norm": 0.45140410276837284, + "acc_norm_stderr": 0.004966158142645413 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4342273307790549, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.4342273307790549, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.02823776942208534, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.02823776942208534 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419994, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419994 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.024838811988033158, + "acc_norm": 0.4, + "acc_norm_stderr": 0.024838811988033158 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.034524539038220385, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.034524539038220385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4064516129032258, + "acc_stderr": 0.027941727346256308, + "acc_norm": 0.4064516129032258, + "acc_norm_stderr": 0.027941727346256308 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35094339622641507, + "acc_stderr": 0.029373646253234686, + "acc_norm": 0.35094339622641507, + "acc_norm_stderr": 0.029373646253234686 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131147, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131147 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.0355068398916558, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.0355068398916558 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624555, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624555 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + 
"acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668784, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42201834862385323, + "acc_stderr": 0.02117499140776317, + "acc_norm": 0.42201834862385323, + "acc_norm_stderr": 0.02117499140776317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.019469518221573702, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.019469518221573702 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331149, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331149 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841195, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, 
+ "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.012002091666902305, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.012002091666902305 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.037818873532059816, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.037818873532059816 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283331, + "mc2": 0.4259316971970392, + "mc2_stderr": 0.015462913136325425 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38488783943329397, + "acc_stderr": 0.016728579701498665, + "acc_norm": 0.46635182998819363, + "acc_norm_stderr": 0.017151384117131876 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.10", + "model_sha": "b2feae16837ddfa9402366e848700bd25c88b330", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.2/result_2023-11-15 02:42:51.json b/LI-ST/Mistral-7B-ko-v0.2/result_2023-11-15 02:42:51.json new file mode 100644 index 0000000000000000000000000000000000000000..6d42bca4a3cebca979c119c099e944d23cf307f1 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.2/result_2023-11-15 02:42:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470137, + "acc_norm": 0.4087030716723549, + "acc_norm_stderr": 0.014365750345427001 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3648675562636925, + "acc_stderr": 0.004804091708812546, + "acc_norm": 0.4743079067914758, + "acc_norm_stderr": 0.0049831897112085155 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010591, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.02832032583010591 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + 
"acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.024666744915187232, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.024666744915187232 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849738, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849738 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173092, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173092 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587192, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587192 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + 
"acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269955, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269955 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.02855582751652879, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.02855582751652879 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475363, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475363 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398864, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.032002553478937816, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.032002553478937816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849648, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849648 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.465162397472841, + "mc2_stderr": 0.015592055613780503 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3919716646989374, + "acc_stderr": 0.016784332119424084, + "acc_norm": 0.4781582054309327, + "acc_norm_stderr": 0.017173944474294375 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.2", + "model_sha": "0a2ba8844a3b7518c29314c775d81937a5c7b4e6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.3/result_2023-11-27 02:32:56.json b/LI-ST/Mistral-7B-ko-v0.3/result_2023-11-27 02:32:56.json new file mode 100644 index 0000000000000000000000000000000000000000..17259f138753bf1d27b46c4ddafb3be05adf2e36 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.3/result_2023-11-27 02:32:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.013697432466693246, + "acc_norm": 0.3848122866894198, + "acc_norm_stderr": 0.01421837106525111 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36735710017924716, + "acc_stderr": 0.004810996652324741, + "acc_norm": 0.469627564230233, + "acc_norm_stderr": 0.004980566907790453 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4495530012771392, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.4495530012771392, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.031709956060406545, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.031709956060406545 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.02823776942208533, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.02823776942208533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134986, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134986 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165897, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165897 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173078, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009805, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009805 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.02455229220934266, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.02455229220934266 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.042857142857142816, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.042857142857142816 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.018875682938069443, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.018875682938069443 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534785, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534785 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225606, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225606 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5021097046413502, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.5021097046413502, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.011940264193195983, + "acc_norm": 0.3226857887874837, + "acc_norm_stderr": 0.011940264193195983 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.03402272044340705, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.03402272044340705 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4518070307573933, + "mc2_stderr": 0.015652737933513572 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4155844155844156, + "acc_stderr": 0.016943586313076575, + "acc_norm": 0.525383707201889, + "acc_norm_stderr": 0.01716818720142926 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.3", + "model_sha": "79cf208351d82dbfb05791f76dfdb7b03c5b8abe", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.4/result_2023-11-27 02:45:47.json b/LI-ST/Mistral-7B-ko-v0.4/result_2023-11-27 02:45:47.json new file mode 100644 index 0000000000000000000000000000000000000000..51c31f547415f2d79fec9514a0a9700905a98362 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.4/result_2023-11-27 02:45:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.3703071672354949, + "acc_norm_stderr": 0.01411129875167495 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3505277833100976, + "acc_stderr": 0.004761601303258895, + "acc_norm": 0.4447321250746863, + "acc_norm_stderr": 0.0049592047730462 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.04689765937278135, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.04689765937278135 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.01775880053421442, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.01775880053421442 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + 
"acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307808, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307808 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.02443301646605245, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.02443301646605245 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3967741935483871, + "acc_stderr": 0.02783123160576796, + "acc_norm": 0.3967741935483871, + "acc_norm_stderr": 0.02783123160576796 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3584905660377358, + "acc_stderr": 0.029514703583981765, + "acc_norm": 0.3584905660377358, + "acc_norm_stderr": 0.029514703583981765 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505417, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505417 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083025, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083025 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4925373134328358, + "acc_stderr": 0.035351400842767194, + "acc_norm": 0.4925373134328358, + "acc_norm_stderr": 0.035351400842767194 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342665, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.02661335084026174, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.02661335084026174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.404320987654321, + "acc_stderr": 0.027306625297327695, + "acc_norm": 0.404320987654321, + "acc_norm_stderr": 0.027306625297327695 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3798165137614679, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.3798165137614679, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389176, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389176 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03782728980865471, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03782728980865471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.019070985589687495, + "acc_norm": 
0.3333333333333333, + "acc_norm_stderr": 0.019070985589687495 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265015, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265015 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152573, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152573 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.011849234291459333, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.011849234291459333 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.03608541011573967, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.03608541011573967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842883, + "mc2": 0.42266472087084006, + "mc2_stderr": 0.015345191543063135 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3270365997638725, + "acc_stderr": 0.016129047485457022, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191392 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.4", + "model_sha": "e19ae536336aadacd842ce5af2542617301421e3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.5/result_2023-11-27 02:45:55.json b/LI-ST/Mistral-7B-ko-v0.5/result_2023-11-27 02:45:55.json new file mode 100644 index 0000000000000000000000000000000000000000..f6f5ab3e06788007ac465a05e44911dec317dd18 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.5/result_2023-11-27 02:45:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.013592431519068082, + "acc_norm": 0.3856655290102389, + "acc_norm_stderr": 0.014224250973257174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36456881099382593, + "acc_stderr": 0.004803253812881047, + "acc_norm": 0.4691296554471221, + "acc_norm_stderr": 0.004980262025472489 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44316730523627074, + "acc_stderr": 0.01776408503534839, + "acc_norm": 0.44316730523627074, + "acc_norm_stderr": 0.01776408503534839 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 
0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485376, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.031753678460966245, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767766, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767766 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.03005258057955784, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 
0.03005258057955784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606648, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606648 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165582, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165582 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.02397386199899207, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.02397386199899207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686934, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686934 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261736, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261736 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44770642201834865, + "acc_stderr": 0.021319754962425462, + "acc_norm": 0.44770642201834865, + "acc_norm_stderr": 0.021319754962425462 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.02835895631342355, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.02835895631342355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706214, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706214 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.0280459469420424, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.0280459469420424 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285714, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.014333522059217892, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.014333522059217892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.0296246635811597, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.0296246635811597 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3005215123859192, + "acc_stderr": 0.011709918883039131, + "acc_norm": 0.3005215123859192, + "acc_norm_stderr": 0.011709918883039131 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03308611113236434, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03308611113236434 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391243, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391243 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766379, + "mc2": 0.44720320938084884, + "mc2_stderr": 0.015529246019817096 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40613931523022434, + "acc_stderr": 0.016884749503191392, + "acc_norm": 0.4639905548996458, + "acc_norm_stderr": 0.017145715365486657 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.5", + "model_sha": "b20a0853eaf043c7271df8b634b0fc5983b70b72", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.6/result_2023-11-28 02:26:55.json b/LI-ST/Mistral-7B-ko-v0.6/result_2023-11-28 02:26:55.json new file mode 100644 index 0000000000000000000000000000000000000000..9008c24d3db5e4f1026e5a161cdae67fdd6af7ab --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.6/result_2023-11-28 02:26:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635474, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759086 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3659629555865365, + "acc_stderr": 0.004807146925162055, + "acc_norm": 0.4735112527384983, + "acc_norm_stderr": 0.004982774293927773 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.0381107966983353, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 
0.0381107966983353 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.017758800534214417, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.017758800534214417 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534446, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534446 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.02801365189199507, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.02801365189199507 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3471698113207547, + "acc_stderr": 0.029300101705549652, + "acc_norm": 0.3471698113207547, + "acc_norm_stderr": 0.029300101705549652 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945284, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945284 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983056, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.0276847214156562, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.0276847214156562 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41467889908256883, + "acc_stderr": 0.0211229032086026, + "acc_norm": 0.41467889908256883, + "acc_norm_stderr": 0.0211229032086026 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 
0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.044313245019684304, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.044313245019684304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223977, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21675977653631284, + "acc_stderr": 0.013780598486443356, + "acc_norm": 0.21675977653631284, + "acc_norm_stderr": 0.013780598486443356 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031232, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031232 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03168091161233883, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03168091161233883 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3396349413298566, + "acc_stderr": 0.01209559250693197, + "acc_norm": 0.3396349413298566, + "acc_norm_stderr": 0.01209559250693197 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.033744993563193555, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.033744993563193555 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512567, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512567 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.4702791160430879, + "mc2_stderr": 0.015650617016562784 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39433293978748524, + "acc_stderr": 0.01680209067489321, + "acc_norm": 
0.4757969303423849, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.6", + "model_sha": "bd48b7c993d858d5e9dcf571e72247d303c5497d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.7/result_2023-12-04 02:04:39.json b/LI-ST/Mistral-7B-ko-v0.7/result_2023-12-04 02:04:39.json new file mode 100644 index 0000000000000000000000000000000000000000..ce3599a32e4606ef081523d74cb22c28bc02c667 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.7/result_2023-12-04 02:04:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35665529010238906, + "acc_stderr": 0.013998056902620199, + "acc_norm": 
0.4044368600682594, + "acc_norm_stderr": 0.01434203648343617 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36984664409480184, + "acc_stderr": 0.004817763581410233, + "acc_norm": 0.477096195976897, + "acc_norm_stderr": 0.004984543540932337 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4342273307790549, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.4342273307790549, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.02809924077580956, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.02809924077580956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.02489047176993815, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.02489047176993815 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280457, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280457 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.03047144586718323, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.03047144586718323 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587193, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587193 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.037336266553835096, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.037336266553835096 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047732, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047732 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.026362437574546545, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.026362437574546545 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + 
"acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.01929196189506638, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.01929196189506638 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.014333522059217892, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.014333522059217892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976694, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.03186785930004128, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.03186785930004128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45569620253164556, + "acc_stderr": 0.03241920684693335, + "acc_norm": 0.45569620253164556, + "acc_norm_stderr": 0.03241920684693335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2796610169491525, + "acc_stderr": 0.011463397393861959, + "acc_norm": 0.2796610169491525, + "acc_norm_stderr": 0.011463397393861959 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.033540924375915195, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.033540924375915195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875837, + "mc2": 0.4469620706076587, + "mc2_stderr": 0.015389336522397358 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3990554899645809, + "acc_stderr": 0.016836377292849296, + "acc_norm": 0.4675324675324675, + "acc_norm_stderr": 0.01715407371668286 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.7", + "model_sha": "2c55047921103b0bebd83ffe967a97f94aa60e02", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/LI-ST/Mistral-7B-ko-v0.8/result_2023-12-15 01:39:49.json b/LI-ST/Mistral-7B-ko-v0.8/result_2023-12-15 01:39:49.json new file mode 100644 index 0000000000000000000000000000000000000000..550a39f091eac7832015a061d9d3438ae36315a1 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.8/result_2023-12-15 01:39:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3097269624573379, + "acc_stderr": 0.013512058415238361, + "acc_norm": 0.3626279863481229, + "acc_norm_stderr": 0.01404910656495502 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3571001792471619, + "acc_stderr": 0.004781654610857131, + "acc_norm": 0.4563831905994822, + "acc_norm_stderr": 0.004970759774676884 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603676, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603676 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.01781438523853444, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.01781438523853444 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.02815023224453559, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.02815023224453559 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.033408675019233246, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.033408675019233246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.40404040404040403, + "acc_stderr": 0.03496130972056127, + "acc_norm": 0.40404040404040403, + "acc_norm_stderr": 0.03496130972056127 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.0316314580755238, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.0316314580755238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.382051282051282, + 
"acc_stderr": 0.024635549163908223, + "acc_norm": 0.382051282051282, + "acc_norm_stderr": 0.024635549163908223 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.027869320571664635, + "acc_norm": 0.4, + "acc_norm_stderr": 0.027869320571664635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972742, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972742 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881564, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.02813325257881564 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247077, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247077 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 
0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4055045871559633, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.4055045871559633, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.027684181883302884, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.027684181883302884 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486635, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.01952431674486635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146292, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146292 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2979591836734694, + "acc_stderr": 0.029279567411065677, + "acc_norm": 0.2979591836734694, + "acc_norm_stderr": 0.029279567411065677 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, 
+ "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.012084265626344202, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.012084265626344202 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.03393388584958403, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.03393388584958403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321323002, + "mc2": 0.47435317492542983, + "mc2_stderr": 0.015496855268461061 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3907910271546635, + "acc_stderr": 0.01677529846510825, + "acc_norm": 0.45690672963400236, + "acc_norm_stderr": 0.017126389093086777 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.8", + "model_sha": "49bb6983b858b53fcd9bcb996bc33feeffc4d8a1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.9/result_2023-12-15 01:39:58.json b/LI-ST/Mistral-7B-ko-v0.9/result_2023-12-15 01:39:58.json new file mode 100644 index 0000000000000000000000000000000000000000..c91cab077f69cbf15c68ef105e405b4d72d6e0fa --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.9/result_2023-12-15 01:39:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30119453924914674, + "acc_stderr": 0.013406741767847636, + "acc_norm": 0.3464163822525597, + "acc_norm_stderr": 0.013905011180063244 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35132443736307506, + "acc_stderr": 0.0047640845971769034, + "acc_norm": 0.4510057757418841, + "acc_norm_stderr": 0.004965768348628059 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.03815827365913236, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.03815827365913236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4495530012771392, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.4495530012771392, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.031068985963122155, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.031068985963122155 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.03521224908841583, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.03521224908841583 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.02737987122994325, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.02737987122994325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199596, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815632, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815632 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.036812296333943194, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.036812296333943194 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { 
+ "acc": 0.44508670520231214, + "acc_stderr": 0.026756255129663765, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.026756255129663765 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.03814269893261837, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.03814269893261837 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379417, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379417 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0383515395439942, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0383515395439942 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44587155963302755, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.44587155963302755, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.0282135041778241, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.0282135041778241 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779205, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331149, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331149 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, 
+ "acc_stderr": 0.028245687391462913, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.028245687391462913 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.011940264193195974, + "acc_norm": 0.3226857887874837, + "acc_norm_stderr": 0.011940264193195974 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.03320574612945432, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.03320574612945432 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396715, + "mc2": 0.4518577671193954, + "mc2_stderr": 0.015379505911432577 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.016983506079577604, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.01718027524608563 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.9", + "model_sha": "c2ede85533e0895505871be87fc34c1906433304", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-Jihun/Mistral-11B-OP-u1k-ver0.7/result_2023-11-01 02:26:39.json b/MNC-Jihun/Mistral-11B-OP-u1k-ver0.7/result_2023-11-01 02:26:39.json new file mode 100644 index 0000000000000000000000000000000000000000..88abf96728e14a928278a47577ce14bc5403b820 --- /dev/null +++ b/MNC-Jihun/Mistral-11B-OP-u1k-ver0.7/result_2023-11-01 02:26:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34726962457337884, + "acc_stderr": 0.013913034529620451, + "acc_norm": 0.39078498293515357, + "acc_norm_stderr": 0.01425856388051378 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3799044015136427, + "acc_stderr": 0.00484370855038653, + "acc_norm": 0.4960167297351125, + "acc_norm_stderr": 0.004989623068778803 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + "acc_stderr": 0.017841995750520857, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.017841995750520857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755292, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755292 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + 
"acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126177, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126177 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985754, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985754 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389174, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389174 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823018, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, 
+ "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422704, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635903, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635903 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.02746470844202213, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.02746470844202213 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833587, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966339, + 
"acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966339 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877743, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.012020128195985757, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.012020128195985757 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.01627228795791694, + "mc2": 0.49501961999008254, + "mc2_stderr": 0.01565387033555305 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5029515938606848, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-Jihun/Mistral-11B-OP-u1k-ver0.7", + "model_sha": "c5549370a409724d0d5c4a4b071cb2b5aa85c184", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-Jihun/Mistral-11B-Omni-OP-u1k-ver0.5/result_2023-10-30 03:47:03.json b/MNC-Jihun/Mistral-11B-Omni-OP-u1k-ver0.5/result_2023-10-30 03:47:03.json new file mode 100644 index 0000000000000000000000000000000000000000..0adc67fa897fac2e9bd44d6632ed858bb45de735 --- /dev/null +++ b/MNC-Jihun/Mistral-11B-Omni-OP-u1k-ver0.5/result_2023-10-30 03:47:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.013928933461382494, + "acc_norm": 0.3984641638225256, + "acc_norm_stderr": 0.014306946052735567 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37502489543915557, + "acc_stderr": 0.004831399218500244, + "acc_norm": 0.47849034056960765, + "acc_norm_stderr": 0.00498516207433611 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693353, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 
0.028256660723360184, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.028256660723360184 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3991031390134529, + "acc_stderr": 0.03286745312567961, + "acc_norm": 0.3991031390134529, + "acc_norm_stderr": 0.03286745312567961 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.02486499515976776, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.02486499515976776 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + 
"acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.0213704946099951, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.0213704946099951 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.044313245019684304, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.044313245019684304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.01948802574552967, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.01948802574552967 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 
0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966346, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966346 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29921773142112124, + "acc_stderr": 0.011695374630696047, + "acc_norm": 0.29921773142112124, + "acc_norm_stderr": 0.011695374630696047 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608742, + "mc2": 0.47047609010515296, + "mc2_stderr": 0.016013828931677482 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4025974025974026, + "acc_stderr": 0.016861020486407776, + "acc_norm": 0.42384887839433294, + "acc_norm_stderr": 0.016989810834628253 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-Jihun/Mistral-11B-Omni-OP-u1k-ver0.5", + "model_sha": "8c58d63d92483624ec8b73e6b3ba93338d1abf86", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-Jihun/Mistral-7B-A-u0.5-b2-ver0.4/result_2023-10-31 05:51:25.json b/MNC-Jihun/Mistral-7B-A-u0.5-b2-ver0.4/result_2023-10-31 05:51:25.json new file mode 100644 index 0000000000000000000000000000000000000000..2e92149702faf454b20f14dc8a4ddc7b8393ac71 --- /dev/null +++ b/MNC-Jihun/Mistral-7B-A-u0.5-b2-ver0.4/result_2023-10-31 05:51:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3242320819112628, + "acc_stderr": 0.013678810399518815, + "acc_norm": 0.3771331058020478, + "acc_norm_stderr": 0.01416336689619259 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3686516630153356, + "acc_stderr": 0.00481453264257465, + "acc_norm": 0.4765982871937861, + "acc_norm_stderr": 0.004984313205791442 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44572158365261816, + "acc_stderr": 0.0177742972824795, + "acc_norm": 0.44572158365261816, + "acc_norm_stderr": 0.0177742972824795 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, 
+ "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.04161808503501528, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.04161808503501528 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.035056301407857426, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.035056301407857426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681855, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681855 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 
+ }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857403, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857403 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656206, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656206 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362233, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362233 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.4342105263157895, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401147, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401147 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653063, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653063 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3307262569832402, + "acc_stderr": 0.01573502625896612, + "acc_norm": 0.3307262569832402, + "acc_norm_stderr": 0.01573502625896612 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.02997280717046463, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.02997280717046463 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897634, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897634 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.47266598912504365, + "mc2_stderr": 0.015392669159401157 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.01718976703213082, + "acc_norm": 0.5501770956316411, + "acc_norm_stderr": 0.01710357334382571 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-Jihun/Mistral-7B-A-u0.5-b2-ver0.4", + "model_sha": "2274c77af5e028132156c1737de2a39d39bbff01", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-Jihun/Mistral-7B-AO-u0.5-b2-ver0.4/result_2023-10-31 05:52:02.json b/MNC-Jihun/Mistral-7B-AO-u0.5-b2-ver0.4/result_2023-10-31 05:52:02.json new file mode 100644 index 0000000000000000000000000000000000000000..074081c4d6ae6f22b544060888c8541cd68d34e6 --- /dev/null +++ b/MNC-Jihun/Mistral-7B-AO-u0.5-b2-ver0.4/result_2023-10-31 05:52:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.013847460518892978, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.01425295984889289 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37621987651862177, + "acc_stderr": 0.004834461997944863, + "acc_norm": 0.4880501892053376, + "acc_norm_stderr": 0.004988356146499017 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107675, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 
0.048657775704107675 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468544, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4967741935483871, + "acc_stderr": 0.02844341422643833, + "acc_norm": 0.4967741935483871, + "acc_norm_stderr": 0.02844341422643833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 
0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857403, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857403 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520193, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520193 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539277, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539277 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04426266681379909, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04426266681379909 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, 
+ "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.01999797303545833, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.01999797303545833 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.03385177976044811, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.03385177976044811 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.36089385474860336, + "acc_stderr": 0.016062290671110476, + "acc_norm": 0.36089385474860336, + "acc_norm_stderr": 0.016062290671110476 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.03175195237583323, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.03175195237583323 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33572359843546284, + "acc_stderr": 0.012061304157664626, + "acc_norm": 0.33572359843546284, + "acc_norm_stderr": 0.012061304157664626 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.4785073157857354, + "mc2_stderr": 0.015443979160746298 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.51357733175915, + "acc_stderr": 0.01718401506040146, + "acc_norm": 0.5761511216056671, + "acc_norm_stderr": 0.01698981083462825 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, 
+ "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-Jihun/Mistral-7B-AO-u0.5-b2-ver0.4", + "model_sha": "de25cb8c3f247d1b0ce3189b9ee3595db7dbbe1f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-Jihun/Mistral-7B-OP-u1k-ver0.6/result_2023-10-30 03:47:23.json b/MNC-Jihun/Mistral-7B-OP-u1k-ver0.6/result_2023-10-30 03:47:23.json new file mode 100644 index 0000000000000000000000000000000000000000..7272b31b42918f942c2a9990c80c870a549e3ca2 --- /dev/null +++ b/MNC-Jihun/Mistral-7B-OP-u1k-ver0.6/result_2023-10-30 03:47:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145687, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.014194389086685265 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3619796853216491, + "acc_stderr": 0.004795908282584555, + 
"acc_norm": 0.44761999601672975, + "acc_norm_stderr": 0.0049623252978409915 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.01785298126663395, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.01785298126663395 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071722, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071722 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830524, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830524 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568385, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987053, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987053 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159665, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159665 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115979, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115979 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.02748747298087159, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.02748747298087159 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734577, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734577 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.45871559633027525, + "acc_stderr": 0.021364122533881695, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.01945076843250551, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.01945076843250551 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.033448873829978666, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.033448873829978666 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976276, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976276 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933105, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163908, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163908 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3155149934810952, + "acc_stderr": 0.011869184843058642, + "acc_norm": 0.3155149934810952, + "acc_norm_stderr": 0.011869184843058642 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.038517163193983954, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.038517163193983954 + }, + "harness|ko_truthfulqa_mc|0": { 
+ "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589667, + "mc2": 0.46872875951621523, + "mc2_stderr": 0.01631020915826667 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3624557260920897, + "acc_stderr": 0.016527131240453716, + "acc_norm": 0.3825265643447462, + "acc_norm_stderr": 0.016709165387228817 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-Jihun/Mistral-7B-OP-u1k-ver0.6", + "model_sha": "23c7a5ec9de97c7c729fb2d9dc76bba8f6cb3406", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-Jihun/Mistral-7B-OP-u1k-ver0.7/result_2023-10-31 01:07:15.json b/MNC-Jihun/Mistral-7B-OP-u1k-ver0.7/result_2023-10-31 01:07:15.json new file mode 100644 index 
0000000000000000000000000000000000000000..14feb06a76cfd90f391d4b2784ac48019d901b59 --- /dev/null +++ b/MNC-Jihun/Mistral-7B-OP-u1k-ver0.7/result_2023-10-31 01:07:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3506825938566553, + "acc_stderr": 0.013944635930726092, + "acc_norm": 0.40273037542662116, + "acc_norm_stderr": 0.014332236306790138 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37880900219079866, + "acc_stderr": 0.004840990593494688, + "acc_norm": 0.49830711013742285, + "acc_norm_stderr": 0.004989752811173411 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + "acc_stderr": 0.017841995750520857, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.017841995750520857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49743589743589745, + "acc_stderr": 0.025350672979412202, + "acc_norm": 0.49743589743589745, + "acc_norm_stderr": 0.025350672979412202 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681848, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681848 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804723, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804723 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137282, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137282 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756663, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756663 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 
0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556054, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556054 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635913, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635913 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887865, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887865 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502326, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502326 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3486033519553073, + "acc_stderr": 0.015937484656687022, + "acc_norm": 0.3486033519553073, + "acc_norm_stderr": 0.015937484656687022 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.03175195237583324, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.03175195237583324 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.03172295004332331, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.03172295004332331 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 
0.012008129938540479, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540479 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015473, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015473 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.0158663464013843, + "mc2": 0.48509410722375507, + "mc2_stderr": 0.015448476334612172 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5182998819362455, + "acc_stderr": 0.017178836639177755, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.017052633559856065 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + 
"config_general": { + "model_name": "MNC-Jihun/Mistral-7B-OP-u1k-ver0.7", + "model_sha": "d6e5e9f3245ff8beba92c77a0cedcfbb5eb8798f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-LLM/Mistral-11B-Omni-OPA-u1k-ver0.7/result_2023-11-02 01:16:59.json b/MNC-LLM/Mistral-11B-Omni-OPA-u1k-ver0.7/result_2023-11-02 01:16:59.json new file mode 100644 index 0000000000000000000000000000000000000000..8770d38f30deeea5391b9e3100c38e2486734e4a --- /dev/null +++ b/MNC-LLM/Mistral-11B-Omni-OPA-u1k-ver0.7/result_2023-11-02 01:16:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497724, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892893 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37801234813782114, + "acc_stderr": 0.004838997427699758, + "acc_norm": 0.4923322047400916, + "acc_norm_stderr": 0.004989194627707854 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + "acc_stderr": 0.017841995750520857, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.017841995750520857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 
0.04810840148082635, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.04810840148082635 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.025254485424799605, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.025254485424799605 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985754, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985754 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961827, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823018, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.02497695405315525, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.02497695405315525 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 
0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442203, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442203 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537318, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537318 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.02862930519400355, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.02862930519400355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750187, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750187 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.033981108902946366, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.033981108902946366 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.01461446582196634, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.01461446582196634 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.02997280717046463, + "acc_norm": 0.41911764705882354, + 
"acc_norm_stderr": 0.02997280717046463 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.01202012819598576, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.01202012819598576 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.4911572350172599, + "mc2_stderr": 0.015610028118935604 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5159386068476978, + "acc_stderr": 0.017181617837190192, + "acc_norm": 0.5430932703659976, + "acc_norm_stderr": 0.01712638909308678 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-LLM/Mistral-11B-Omni-OPA-u1k-ver0.7", + "model_sha": "b191a814d7f0ab540eaa36f8f6ca4c189e4d3a5f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-LLM/Mistral-7B-O3k-Au1k-ver0.7/result_2023-11-01 05:18:23.json b/MNC-LLM/Mistral-7B-O3k-Au1k-ver0.7/result_2023-11-01 05:18:23.json new file mode 100644 index 0000000000000000000000000000000000000000..1ebc4971704dc45c9247a9ea12f8cd08ad5551e1 --- /dev/null +++ b/MNC-LLM/Mistral-7B-O3k-Au1k-ver0.7/result_2023-11-01 05:18:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.01389693846114568, + "acc_norm": 0.3890784982935154, + "acc_norm_stderr": 0.014247309976045605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3761202947619996, + "acc_stderr": 0.004834207964061324, + "acc_norm": 0.48834893447520417, + "acc_norm_stderr": 0.004988426528513012 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107675, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107675 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468544, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 
0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5462184873949579, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.5462184873949579, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520193, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520193 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 
0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.040260970832965585, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.040260970832965585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829156, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829156 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.34413407821229053, + "acc_stderr": 0.015889221313307094, + "acc_norm": 0.34413407821229053, + "acc_norm_stderr": 0.015889221313307094 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767105, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.01600265148736101, + "mc2": 0.47256825783555356, + "mc2_stderr": 0.015562189062650065 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5171192443919717, + "acc_stderr": 0.01718027524608563, + "acc_norm": 0.5525383707201889, + "acc_norm_stderr": 0.017095190301500578 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-LLM/Mistral-7B-O3k-Au1k-ver0.7", + "model_sha": "99abb58ee6efae9e5cdc9bc427c79bc4a7b6f1a2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJ1hun/Dolphin-Mistral-7B-OP-u1k-ver0.1/result_2023-10-28 16:43:18.json b/MNCJ1hun/Dolphin-Mistral-7B-OP-u1k-ver0.1/result_2023-10-28 16:43:18.json new file mode 100644 index 0000000000000000000000000000000000000000..1b6b05a8ecf43f2c3e6629f7eb60b1e6947a27c6 --- /dev/null +++ b/MNCJ1hun/Dolphin-Mistral-7B-OP-u1k-ver0.1/result_2023-10-28 16:43:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.01397545412275656, + "acc_norm": 0.40784982935153585, + "acc_norm_stderr": 0.014361097288449686 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38189603664608646, + "acc_stderr": 0.0048485832436066904, + "acc_norm": 0.49661422027484564, + "acc_norm_stderr": 0.004989667009372637 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.01783579880629064, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.01783579880629064 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336018, + "acc_norm": 
0.45016077170418006, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416546, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416546 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017838, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017838 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.03035152732334494, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.03035152732334494 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.03070948699255654, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.03070948699255654 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + 
"acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699954, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699954 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686934, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686934 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490435, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.01948802574552966, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.01948802574552966 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.0141022236231526, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.0141022236231526 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 0.011808598262503318, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3268053855569155, + "mc1_stderr": 0.016419874731135035, + "mc2": 0.4937623805683608, + "mc2_stderr": 0.015810468549274707 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4344746162927981, + "acc_stderr": 0.017042098620824928, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, 
+ "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJ1hun/Dolphin-Mistral-7B-OP-u1k-ver0.1", + "model_sha": "4790deb15d0c30a0a8728d8f8419e1694c21eb1a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJ1hun/MIstral-11B-Omni-OP-1k-2048-ver0.1/result_2023-10-29 00:22:07.json b/MNCJ1hun/MIstral-11B-Omni-OP-1k-2048-ver0.1/result_2023-10-29 00:22:07.json new file mode 100644 index 0000000000000000000000000000000000000000..cfa4747ccfd0968523fa804f074739c8df031c9f --- /dev/null +++ b/MNCJ1hun/MIstral-11B-Omni-OP-1k-2048-ver0.1/result_2023-10-29 00:22:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.01404195794503808, + "acc_norm": 0.41552901023890787, + "acc_norm_stderr": 0.014401366641216395 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38179645488946423, + "acc_stderr": 0.00484834156049215, + "acc_norm": 0.4947221668990241, + "acc_norm_stderr": 0.004989503417767287 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.017818248603465554, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986483, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.0279404571362284, + 
"acc_norm": 0.3, + "acc_norm_stderr": 0.0279404571362284 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607715, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607715 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362223, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362223 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017087, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017087 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.02855582751652879, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.02855582751652879 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924806, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759426, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759426 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21675977653631284, + "acc_stderr": 0.013780598486443354, + "acc_norm": 0.21675977653631284, + "acc_norm_stderr": 0.013780598486443354 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411952, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411952 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842977, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842977 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.4663054587466787, + "mc2_stderr": 0.015613323568757127 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.017177301992342547, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.01718401506040145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJ1hun/MIstral-11B-Omni-OP-1k-2048-ver0.1", + "model_sha": "a64bcca1371fa2285981fc40dbd8b879857f1e2e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJ1hun/MIstral-11B-Omni-OP-u1k-ver0.1/result_2023-10-29 00:20:22.json b/MNCJ1hun/MIstral-11B-Omni-OP-u1k-ver0.1/result_2023-10-29 00:20:22.json new file mode 100644 index 0000000000000000000000000000000000000000..1d6ae20595f9f6b0e9daf55bb18f375326d8e991 --- /dev/null +++ b/MNCJ1hun/MIstral-11B-Omni-OP-u1k-ver0.1/result_2023-10-29 00:20:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3575085324232082, + "acc_stderr": 0.014005494275916576, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.014430197069326028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38279227245568614, + "acc_stderr": 0.004850748687859933, + "acc_norm": 0.4874526986656045, + "acc_norm_stderr": 0.004988210033832016 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4623243933588761, + "acc_stderr": 0.017829131764287198, + "acc_norm": 
0.4623243933588761, + "acc_norm_stderr": 0.017829131764287198 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.032025630761017346, + "acc_norm": 0.4, + "acc_norm_stderr": 0.032025630761017346 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307807, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307807 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.02508830145469484, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.02508830145469484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019413, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019413 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173095, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173095 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556538, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556538 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.03461199429040013, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.03461199429040013 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0242785680243077, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0242785680243077 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369818, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369818 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.02141099975363592, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.02141099975363592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171573, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171573 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.019706875804085627, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.019706875804085627 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21564245810055865, + "acc_stderr": 0.013754835975482355, + "acc_norm": 0.21564245810055865, + "acc_norm_stderr": 0.013754835975482355 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31747066492829207, + "acc_stderr": 0.01188889206880931, + "acc_norm": 0.31747066492829207, + "acc_norm_stderr": 0.01188889206880931 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394823, + "mc2": 0.47510378175366297, + "mc2_stderr": 0.015686785961170725 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4297520661157025, + "acc_stderr": 0.017019847535972205, + "acc_norm": 0.48642266824085006, + "acc_norm_stderr": 0.017184015060401448 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJ1hun/MIstral-11B-Omni-OP-u1k-ver0.1", + "model_sha": "3cf7eb4c014f181bec2a9b36897771b2710422d1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJ1hun/Mistral-7B-OP-u1k-ver0.4/result_2023-10-29 12:13:24.json b/MNCJ1hun/Mistral-7B-OP-u1k-ver0.4/result_2023-10-29 12:13:24.json new file mode 100644 index 0000000000000000000000000000000000000000..c2b93f10f3ad33b439e1e8e611543aa5371d09e1 --- /dev/null +++ b/MNCJ1hun/Mistral-7B-OP-u1k-ver0.4/result_2023-10-29 12:13:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145685, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37860983867755427, + "acc_stderr": 0.004840493603166214, + "acc_norm": 0.49482174865564627, + 
"acc_norm_stderr": 0.004989513809408586 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693353, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841585, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841585 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.02860595370200425, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983042, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983042 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.0250107491161376, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.0250107491161376 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022902, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022902 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978252, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978252 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750186, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750186 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321616, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321616 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3396648044692737, + "acc_stderr": 0.01583940040621249, + "acc_norm": 0.3396648044692737, + "acc_norm_stderr": 0.01583940040621249 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03025437257397669, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03025437257397669 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666544, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.011989936640666544 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015473, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015473 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.039025510073744496, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 
0.039025510073744496 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.01598359510181139, + "mc2": 0.48626023725218265, + "mc2_stderr": 0.015456180399303063 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.51357733175915, + "acc_stderr": 0.01718401506040146, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.017090852631668332 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJ1hun/Mistral-7B-OP-u1k-ver0.4", + "model_sha": "584915ea3f453b6771b188b11629e859473e7e9d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJ1hun/Mistral-7B-OP-u1k-ver0.5/result_2023-10-29 12:13:11.json b/MNCJ1hun/Mistral-7B-OP-u1k-ver0.5/result_2023-10-29 12:13:11.json new 
file mode 100644 index 0000000000000000000000000000000000000000..f1fb50a17277b1039f7b0a6c6cdd9fc59284c986 --- /dev/null +++ b/MNCJ1hun/Mistral-7B-OP-u1k-ver0.5/result_2023-10-29 12:13:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3506825938566553, + "acc_stderr": 0.013944635930726085, + "acc_norm": 0.3993174061433447, + "acc_norm_stderr": 0.014312094557946704 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38099980083648677, + "acc_stderr": 0.004846400325585238, + "acc_norm": 0.485062736506672, + "acc_norm_stderr": 0.004987554255981858 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44316730523627074, + "acc_stderr": 0.01776408503534839, + "acc_norm": 0.44316730523627074, + "acc_norm_stderr": 0.01776408503534839 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742399, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0438986995680878, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0438986995680878 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.57, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.030656748696739428, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.030656748696739428 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159664, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + 
"acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.021364122533881695, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.01954210156485412, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.01954210156485412 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.046355501356099754, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.046355501356099754 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103987, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103987 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.3076923076923077, + "acc_stderr": 0.011787910251664594, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.011787910251664594 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4662275406128657, + "mc2_stderr": 0.015931307436185087 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4132231404958678, + "acc_stderr": 0.016929480234495226, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.017090852631668336 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJ1hun/Mistral-7B-OP-u1k-ver0.5", + "model_sha": "3ccdca4afa332d805c50ffbaaa84cd8fa8b9ebe1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJ1hun/Zephyr-7B-alpha-OP-u1k-ver0.1/result_2023-10-29 00:21:54.json b/MNCJ1hun/Zephyr-7B-alpha-OP-u1k-ver0.1/result_2023-10-29 00:21:54.json new file mode 100644 index 0000000000000000000000000000000000000000..af5c1ec1665b6861cdc88048169842cfbe73afa6 --- /dev/null +++ b/MNCJ1hun/Zephyr-7B-alpha-OP-u1k-ver0.1/result_2023-10-29 00:21:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840055, + "acc_norm": 0.41723549488054607, + "acc_norm_stderr": 0.014409825518403082 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38149770961959767, + "acc_stderr": 0.00484761521647345, + "acc_norm": 0.4923322047400916, + "acc_norm_stderr": 0.0049891946277078525 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468547, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998573, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998573 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.03035152732334493, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.03035152732334493 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.02432631052914913, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.02432631052914913 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + 
"acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.36728395061728397, + "acc_stderr": 0.026822801759507894, + "acc_norm": 0.36728395061728397, + "acc_norm_stderr": 0.026822801759507894 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.02143642095552942, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.02143642095552942 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.01954210156485412, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.01954210156485412 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639886, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291521, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291521 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961459, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961459 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125468, + "acc_norm": 0.4117647058823529, + 
"acc_norm_stderr": 0.029896163033125468 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29921773142112124, + "acc_stderr": 0.011695374630696052, + "acc_norm": 0.29921773142112124, + "acc_norm_stderr": 0.011695374630696052 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219371, + "mc2": 0.45601808163931185, + "mc2_stderr": 0.015622209231910858 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40731995277449823, + "acc_stderr": 0.01689245669519127, + "acc_norm": 0.4604486422668241, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJ1hun/Zephyr-7B-alpha-OP-u1k-ver0.1", + "model_sha": "7692de676eb6a3561d10a21a64bcf45cc629665b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined/result_2023-10-24 01:28:22.json b/MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined/result_2023-10-24 01:28:22.json new file mode 100644 index 0000000000000000000000000000000000000000..3bccf879ace5823d26407a014943723b2d3cdfc0 --- /dev/null +++ b/MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined/result_2023-10-24 01:28:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2721843003412969, + "acc_stderr": 0.013006600406423709, + "acc_norm": 0.32849829351535836, + "acc_norm_stderr": 0.013724978465537377 + }, + "harness|ko_hellaswag|10": { + "acc": 0.345947022505477, + "acc_stderr": 0.00474703876817253, + "acc_norm": 0.42362079267078273, + "acc_norm_stderr": 0.004931219148182244 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.047504583990416925, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.047504583990416925 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4163473818646232, + "acc_stderr": 0.017627948030430298, + "acc_norm": 0.4163473818646232, + "acc_norm_stderr": 0.017627948030430298 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.027316847674192717, + "acc_norm": 0.3633440514469453, + "acc_norm_stderr": 0.027316847674192717 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.041184385658062976 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + 
"acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.033586181457325226, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.033586181457325226 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185553, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185553 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931673, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931673 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602357, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602357 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.027869320571664632, + "acc_norm": 0.4, + "acc_norm_stderr": 0.027869320571664632 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.03265903381186195, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.03265903381186195 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.0295822451283843, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.0295822451283843 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425464, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425464 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43781094527363185, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.43781094527363185, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028428, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028428 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + 
"acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.02494679222527231, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.02494679222527231 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.037311335196738925, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.037311335196738925 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02712511551316687, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02712511551316687 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3779816513761468, + "acc_stderr": 0.020789187066728113, + "acc_norm": 0.3779816513761468, + "acc_norm_stderr": 0.020789187066728113 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871137, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871137 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110317, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110317 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.0387813988879761, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.0387813988879761 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.018875682938069443, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.018875682938069443 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000533, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000533 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966342, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966342 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146293, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146293 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.30612244897959184, + "acc_stderr": 0.029504896454595968, + "acc_norm": 0.30612244897959184, + "acc_norm_stderr": 0.029504896454595968 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4388185654008439, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.4388185654008439, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048231, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048231 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.03364487286088299, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.03364487286088299 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 0.037425970438065864, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.037425970438065864 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.47535947414675184, + "mc2_stderr": 0.015845184891705482 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22195985832349469, + "acc_stderr": 0.014287394616821172, + "acc_norm": 0.2668240850059032, + "acc_norm_stderr": 0.015206575684565883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined", + "model_sha": "5f5dac05ae42c508810fe2dc7d4eef1350c3a1b2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-SlimOrca-eng-kor-combined/result_2023-10-24 01:03:10.json b/MNCJihun/Mistral-7B-SlimOrca-eng-kor-combined/result_2023-10-24 01:03:10.json new file mode 100644 index 0000000000000000000000000000000000000000..beb317e4f22f8306d6ca044f2fd1ec97f622d238 --- /dev/null +++ b/MNCJihun/Mistral-7B-SlimOrca-eng-kor-combined/result_2023-10-24 01:03:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.295221843003413, + "acc_stderr": 0.013329750293382316, + "acc_norm": 0.3378839590443686, + "acc_norm_stderr": 0.013822047922283516 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3476399123680542, + "acc_stderr": 0.004752476997887829, + "acc_norm": 0.434973112925712, + "acc_norm_stderr": 0.004947402907996247 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.0484674825397724, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.0484674825397724 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4112388250319285, + "acc_stderr": 0.017595971908056573, + "acc_norm": 0.4112388250319285, + "acc_norm_stderr": 0.017595971908056573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596241, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596241 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085335, + 
"acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085335 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330313, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330313 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.041184385658062976 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.03979236637497411, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.03979236637497411 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.03128217706368461, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.03128217706368461 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.0242831405294673, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.0242831405294673 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280457, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280457 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.02737987122994325, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.02737987122994325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5854700854700855, + "acc_stderr": 0.0322739656762378, + "acc_norm": 0.5854700854700855, + "acc_norm_stderr": 0.0322739656762378 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432118, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432118 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606648, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606648 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.46766169154228854, + "acc_stderr": 0.035281314729336065, + "acc_norm": 
0.46766169154228854, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762606, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762606 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41329479768786126, + "acc_stderr": 0.02651126136940924, + "acc_norm": 0.41329479768786126, + "acc_norm_stderr": 0.02651126136940924 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470021, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470021 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.027163686038271233, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.027163686038271233 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858855, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858855 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3871559633027523, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.3871559633027523, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.042857142857142816, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.042857142857142816 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.018771683893528183, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.018771683893528183 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536048, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536048 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22793296089385476, + "acc_stderr": 0.014030149950805097, + "acc_norm": 0.22793296089385476, + "acc_norm_stderr": 0.014030149950805097 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.02736586113151381, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.02736586113151381 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788153, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788153 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.011901895635786088, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.011901895635786088 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.0327028718148208, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.0327028718148208 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.0381549430868893 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.4600089007139919, + "mc2_stderr": 0.015856276729730875 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24321133412042503, + "acc_stderr": 0.014750068360453263, + "acc_norm": 0.2798110979929162, + "acc_norm_stderr": 0.015433715795427778 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihun/Mistral-7B-SlimOrca-eng-kor-combined", + "model_sha": "a9340fcc369bba2e0200a3a378078fa14f4075b3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-SlimOrca-orca-platy-1k/result_2023-10-23 07:44:01.json b/MNCJihun/Mistral-7B-SlimOrca-orca-platy-1k/result_2023-10-23 07:44:01.json new file mode 100644 index 0000000000000000000000000000000000000000..d6ba16aac915bfe1b99cfbf54aa697633e0be650 --- /dev/null +++ b/MNCJihun/Mistral-7B-SlimOrca-orca-platy-1k/result_2023-10-23 07:44:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28754266211604096, + "acc_stderr": 0.013226719056266127, + "acc_norm": 0.3395904436860068, + "acc_norm_stderr": 0.01383903976282016 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36297550288787095, + "acc_stderr": 0.004798751281560832, + "acc_norm": 0.45558653654650466, + "acc_norm_stderr": 0.004970057183367319 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.01757070523925654, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.01757070523925654 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 
0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380056, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380056 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3665594855305466, + "acc_stderr": 0.02736807824397163, + "acc_norm": 0.3665594855305466, + "acc_norm_stderr": 0.02736807824397163 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2914798206278027, + "acc_stderr": 0.03050028317654591, + "acc_norm": 0.2914798206278027, + "acc_norm_stderr": 0.03050028317654591 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.025158266016868568, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.025158266016868568 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.033661244890514495, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.033661244890514495 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.35161290322580646, + "acc_stderr": 0.027162537826948458, + "acc_norm": 0.35161290322580646, + "acc_norm_stderr": 0.027162537826948458 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5512820512820513, + "acc_stderr": 0.032583346493868806, + "acc_norm": 0.5512820512820513, + "acc_norm_stderr": 0.032583346493868806 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425465, + "acc_norm": 
0.37272727272727274, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.42786069651741293, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.42786069651741293, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0365634365335316, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0365634365335316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983053, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983053 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3208092485549133, + "acc_stderr": 0.025131000233647907, + "acc_norm": 0.3208092485549133, + "acc_norm_stderr": 0.025131000233647907 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3765432098765432, + "acc_stderr": 0.02695934451874779, + "acc_norm": 0.3765432098765432, + "acc_norm_stderr": 0.02695934451874779 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.03606065001832917, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.03606065001832917 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768176, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768176 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.47107438016528924, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.04556710331269498 
+ }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849727, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849727 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.017883188134667192, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.017883188134667192 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952685, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952685 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409167, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409167 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556163, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.41350210970464135, + "acc_stderr": 0.03205649904851859, + "acc_norm": 0.41350210970464135, + "acc_norm_stderr": 0.03205649904851859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03283472056108567, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03283472056108567 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 0.03742597043806586, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.03742597043806586 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.01607750926613303, + "mc2": 0.4664193395730685, + "mc2_stderr": 0.015885964841438872 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.345926800472255, + "acc_stderr": 0.016353853414347575, + "acc_norm": 0.4132231404958678, + "acc_norm_stderr": 0.016929480234495226 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihun/Mistral-7B-SlimOrca-orca-platy-1k", + "model_sha": "96fceca38b3714b0ae8ec6dc120f13036eaeb69c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-eng-kor-cot-combined/result_2023-10-24 01:05:12.json b/MNCJihun/Mistral-7B-eng-kor-cot-combined/result_2023-10-24 01:05:12.json new file mode 100644 index 0000000000000000000000000000000000000000..03851aa9d089b62ce37b769ee184e26e5cc87f2a --- /dev/null +++ b/MNCJihun/Mistral-7B-eng-kor-cot-combined/result_2023-10-24 01:05:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28668941979522183, + "acc_stderr": 0.01321498632927476, + "acc_norm": 0.33447098976109213, + "acc_norm_stderr": 0.013787460322441374 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34096793467436765, + "acc_stderr": 0.004730658073041555, + "acc_norm": 0.4268074088826927, + "acc_norm_stderr": 0.004936029827672039 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.36257309941520466, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.36257309941520466, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.046202840822800406, + 
"acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.046202840822800406 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3997445721583653, + "acc_stderr": 0.01751684790705327, + "acc_norm": 0.3997445721583653, + "acc_norm_stderr": 0.01751684790705327 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380045, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380045 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288085, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288085 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484504, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484504 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138622, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138622 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237653, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.023901157979402538, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.023901157979402538 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293753, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293753 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.32903225806451614, + "acc_stderr": 0.02672949906834996, + "acc_norm": 0.32903225806451614, + "acc_norm_stderr": 
0.02672949906834996 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5042735042735043, + "acc_stderr": 0.03275489264382132, + "acc_norm": 0.5042735042735043, + "acc_norm_stderr": 0.03275489264382132 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695248, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695248 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228412, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228412 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43283582089552236, + "acc_stderr": 0.0350349092367328, + "acc_norm": 0.43283582089552236, + "acc_norm_stderr": 0.0350349092367328 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.024026846392873502, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.024026846392873502 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999998, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999998 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3439306358381503, + "acc_stderr": 0.025574123786546648, + "acc_norm": 0.3439306358381503, + "acc_norm_stderr": 0.025574123786546648 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.03680350371286461, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.03680350371286461 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.026869490744815247, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.026869490744815247 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3626943005181347, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.3626943005181347, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3651376146788991, + "acc_stderr": 0.020642801454383995, + "acc_norm": 0.3651376146788991, + "acc_norm_stderr": 0.020642801454383995 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.026787453111906532, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.026787453111906532 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093085, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093085 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952688, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952688 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456052, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456052 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22793296089385476, + "acc_stderr": 0.014030149950805097, + "acc_norm": 0.22793296089385476, + "acc_norm_stderr": 0.014030149950805097 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225418, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225418 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.030472526026726492, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.030472526026726492 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4219409282700422, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.4219409282700422, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29986962190352023, + "acc_stderr": 0.011702660860193989, + "acc_norm": 0.29986962190352023, + "acc_norm_stderr": 0.011702660860193989 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.46556936650012803, + "mc2_stderr": 0.01608055615378503 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.20188902007083825, + "acc_stderr": 0.01380075389577743, + "acc_norm": 0.21959858323494688, + "acc_norm_stderr": 0.014232743085580275 + } + }, + 
"versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihun/Mistral-7B-eng-kor-cot-combined", + "model_sha": "ad4d7c60244d0f1e0cc11d44be9b14c3354df448", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-guanaco-1k-orca-platy-1k-ep4/result_2023-10-23 06:43:50.json b/MNCJihun/Mistral-7B-guanaco-1k-orca-platy-1k-ep4/result_2023-10-23 06:43:50.json new file mode 100644 index 0000000000000000000000000000000000000000..10b174279193daba664e736aec18b78fb1b15fbf --- /dev/null +++ b/MNCJihun/Mistral-7B-guanaco-1k-orca-platy-1k-ep4/result_2023-10-23 06:43:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2960750853242321, + "acc_stderr": 0.013340916085246263, + 
"acc_norm": 0.3319112627986348, + "acc_norm_stderr": 0.01376098820088054 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36367257518422624, + "acc_stderr": 0.004800728138792386, + "acc_norm": 0.4591714797849034, + "acc_norm_stderr": 0.004973117975062484 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781169, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781169 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.017818248603465568, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465568 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.036807836907275814, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.036807836907275814 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03481285338232963, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03481285338232963 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121633, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121633 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.0242831405294673, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.0242831405294673 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.029582245128384303, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.029582245128384303 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4129353233830846, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.4129353233830846, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.034140140070440354, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.034140140070440354 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.02418049716437691, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.02418049716437691 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3670520231213873, + "acc_stderr": 0.02595005433765408, + "acc_norm": 0.3670520231213873, + "acc_norm_stderr": 0.02595005433765408 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38580246913580246, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.38580246913580246, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.035415085788840193, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 0.035415085788840193 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373146, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373146 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3871559633027523, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.3871559633027523, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.027780141207023334, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.027780141207023334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351585, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223977, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320196, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320196 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697625, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697625 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331149, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331149 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.027472274473233818, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.027472274473233818 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.32653061224489793, + "acc_stderr": 0.030021056238440327, + "acc_norm": 0.32653061224489793, + "acc_norm_stderr": 0.030021056238440327 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698604, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698604 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + 
"acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.4747810026483803, + "mc2_stderr": 0.016087880887613513 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3010625737898465, + "acc_stderr": 0.015771113299945457, + "acc_norm": 0.3140495867768595, + "acc_norm_stderr": 0.015957332434295066 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihun/Mistral-7B-guanaco-1k-orca-platy-1k-ep4", + "model_sha": "13e5692b7a084265617f75f81209dce34e414489", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/MNCJihun/Mistral-7B-orca-platy-2k/result_2023-10-23 06:43:37.json b/MNCJihun/Mistral-7B-orca-platy-2k/result_2023-10-23 06:43:37.json new file mode 100644 index 0000000000000000000000000000000000000000..7c1a49fa48b43b81cbe4f0d9ac40db3b58e6cb21 --- /dev/null +++ b/MNCJihun/Mistral-7B-orca-platy-2k/result_2023-10-23 06:43:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2977815699658703, + "acc_stderr": 0.013363080107244489, + "acc_norm": 0.33361774744027306, + "acc_norm_stderr": 0.013778687054176541 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3686516630153356, + "acc_stderr": 0.004814532642574648, + "acc_norm": 0.46086436964748057, + "acc_norm_stderr": 0.004974473255391268 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529918, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529918 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.0484674825397724, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.0484674825397724 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41890166028097064, + "acc_stderr": 0.01764320505237717, + "acc_norm": 0.41890166028097064, + "acc_norm_stderr": 0.01764320505237717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977109, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977109 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386694, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386694 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085342, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085342 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.034273086529999344, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.034273086529999344 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.41794871794871796, + "acc_stderr": 0.025007329882461224, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461224 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.047128212574267705, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.047128212574267705 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38064516129032255, + "acc_stderr": 0.027621717832907036, + "acc_norm": 0.38064516129032255, + "acc_norm_stderr": 0.027621717832907036 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.0295822451283843, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.0295822451283843 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587194, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587194 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208732, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.03531987930208732 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194974, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194974 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.026362437574546545, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.026362437574546545 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.038020681028996146, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.038020681028996146 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 
0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579861, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579861 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43302752293577984, + "acc_stderr": 0.021244146569074345, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.021244146569074345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.02692565465361569, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.02692565465361569 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.018975427920507215, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.018975427920507215 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639882, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915206, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915206 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476787, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476787 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687765, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687765 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142804, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142804 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.012020128195985759, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.012020128195985759 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.01598359510181139, + "mc2": 0.4593881639963632, + "mc2_stderr": 0.01579718957910925 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.35182998819362454, + "acc_stderr": 0.016418206451218057, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.016819438642971408 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihun/Mistral-7B-orca-platy-2k", + "model_sha": "45eb0f68911f65b3a5ac83a851c716add059bf5a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihunKim/MIstral-7B-SlimOrca-OP-2k/result_2023-10-26 01:19:41.json b/MNCJihunKim/MIstral-7B-SlimOrca-OP-2k/result_2023-10-26 01:19:41.json new file mode 100644 index 0000000000000000000000000000000000000000..d5cdccf95db237450070f18c4b363c47bbd0eedf --- /dev/null +++ b/MNCJihunKim/MIstral-7B-SlimOrca-OP-2k/result_2023-10-26 01:19:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.35238907849829354, + "acc_norm_stderr": 0.01396014260059868 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36536546504680345, + "acc_stderr": 0.004805483767055344, + "acc_norm": 0.45648277235610435, + "acc_norm_stderr": 0.004970846697552307 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4355044699872286, + "acc_stderr": 0.01773058992792662, + "acc_norm": 0.4355044699872286, + "acc_norm_stderr": 0.01773058992792662 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928276, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + 
"acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4128205128205128, + "acc_stderr": 0.024962683564331817, + "acc_norm": 0.4128205128205128, + "acc_norm_stderr": 0.024962683564331817 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883231, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883231 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.02766618207553963, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.02766618207553963 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286102, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286102 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815646, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815646 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696525, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520196, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520196 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, 
+ "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.038818912133343826, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.038818912133343826 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871595, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44036697247706424, + "acc_stderr": 0.021284310623761547, + "acc_norm": 0.44036697247706424, + "acc_norm_stderr": 0.021284310623761547 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556054, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556054 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.01918463932809249, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.01918463932809249 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403196, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.318122555410691, + "acc_stderr": 0.011895407281104097, + "acc_norm": 0.318122555410691, + "acc_norm_stderr": 0.011895407281104097 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768542, + "mc2": 0.4631702412075074, + "mc2_stderr": 0.01580874554216882 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3789846517119244, + "acc_stderr": 0.01667926068422929, + "acc_norm": 0.43683589138134593, + "acc_norm_stderr": 0.017052633559856076 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 
1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihunKim/MIstral-7B-SlimOrca-OP-2k", + "model_sha": "339ce8fcda3879a2a6e0dbe0ffb06d1f0be9fd15", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihunKim/Mistral-7B-OpenOrca-orca-platy-out1kover/result_2023-10-25 15:13:11.json b/MNCJihunKim/Mistral-7B-OpenOrca-orca-platy-out1kover/result_2023-10-25 15:13:11.json new file mode 100644 index 0000000000000000000000000000000000000000..289f0936786c4552f974239a33b7c4de4946ac00 --- /dev/null +++ b/MNCJihunKim/Mistral-7B-OpenOrca-orca-platy-out1kover/result_2023-10-25 15:13:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.013975454122756557, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398322 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3813981278629755, + "acc_stderr": 0.004847372670134637, + "acc_norm": 0.48954391555467036, + "acc_norm_stderr": 0.00498869022950566 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4776500638569604, + "acc_stderr": 0.017862091778507876, + "acc_norm": 0.4776500638569604, + "acc_norm_stderr": 0.017862091778507876 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562786, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562786 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009225, + "acc_norm": 
0.3969465648854962, + "acc_norm_stderr": 0.04291135671009225 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.028443414226438316, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.028443414226438316 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768818, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768818 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702862, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702862 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778657, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778657 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159795, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 
0.024942368931159795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797609, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.03878139888797609 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.01957695312208884, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.01957695312208884 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.046840993210771065, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.046840993210771065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2022346368715084, + "acc_stderr": 0.01343372948332099, + "acc_norm": 0.2022346368715084, + "acc_norm_stderr": 0.01343372948332099 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682487, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682487 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31029986962190353, + "acc_stderr": 0.011815439293469829, + "acc_norm": 0.31029986962190353, + "acc_norm_stderr": 0.011815439293469829 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.4750791587895867, + "mc2_stderr": 0.015736885636484024 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44037780401416765, + "acc_stderr": 0.01706769977431298, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.01718976703213082 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihunKim/Mistral-7B-OpenOrca-orca-platy-out1kover", + "model_sha": "6a36ede83f774993cca1e5193c0c702e4b998676", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihunKim/Mistral-7B-SlimOrca-OP-8k/result_2023-10-26 04:06:59.json b/MNCJihunKim/Mistral-7B-SlimOrca-OP-8k/result_2023-10-26 04:06:59.json new file mode 100644 index 0000000000000000000000000000000000000000..145a1d19b567aa342917393d1e553481eb8cf123 --- /dev/null +++ b/MNCJihunKim/Mistral-7B-SlimOrca-OP-8k/result_2023-10-26 04:06:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30716723549488056, + "acc_stderr": 0.013481034054980945, + "acc_norm": 0.34215017064846415, + "acc_norm_stderr": 0.013864152159177278 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36317466640111534, + "acc_stderr": 0.004799317209902018, + "acc_norm": 0.4519020115514838, + "acc_norm_stderr": 0.0049666408680838605 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066165, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066165 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.438058748403576, + "acc_stderr": 0.01774223223825723, + "acc_norm": 0.438058748403576, + "acc_norm_stderr": 0.01774223223825723 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380056, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380056 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + 
"acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3987138263665595, + "acc_stderr": 0.0278093225857745, + "acc_norm": 0.3987138263665595, + "acc_norm_stderr": 0.0278093225857745 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.0324430528300873, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.0324430528300873 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467766, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467766 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309993, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309993 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830517, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830517 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.02786932057166464, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02786932057166464 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332786, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332786 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.02581675679158419, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.02581675679158419 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.027002521034516468, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.027002521034516468 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036093, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036093 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43302752293577984, + "acc_stderr": 0.02124414656907434, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.02124414656907434 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791438, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791438 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.03842498559395269, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.03842498559395269 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.019117213911495155, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.019117213911495155 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.043642261558410445, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.043642261558410445 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808847, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808847 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484378, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842982, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842982 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4607470933289765, + "mc2_stderr": 0.015783351321862177 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3447461629279811, + "acc_stderr": 0.016340649905418697, + "acc_norm": 0.41086186540731995, + "acc_norm_stderr": 0.01691497276784107 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihunKim/Mistral-7B-SlimOrca-OP-8k", + "model_sha": "70b643a9304f4b45ca6ae3b4ff6afbd8f8967145", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihunKim/Mistral-7B-SlimOrca-orca-platy-out1kover/result_2023-10-25 15:12:52.json b/MNCJihunKim/Mistral-7B-SlimOrca-orca-platy-out1kover/result_2023-10-25 15:12:52.json new file mode 100644 index 0000000000000000000000000000000000000000..8ebafe0aae63a671395032bd5374543291de2de9 --- /dev/null +++ b/MNCJihunKim/Mistral-7B-SlimOrca-orca-platy-out1kover/result_2023-10-25 15:12:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35921501706484643, + "acc_stderr": 0.014020224155839155, + "acc_norm": 0.41552901023890787, + "acc_norm_stderr": 0.014401366641216395 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3822943636725752, + "acc_stderr": 0.004849547819134474, + "acc_norm": 0.4878510256920932, + "acc_norm_stderr": 0.004988308234687271 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.017879948914431662, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.017879948914431662 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 
0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561056, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561056 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.02961432369045665, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.02961432369045665 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 
+ }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149126, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149126 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48623853211009177, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.48623853211009177, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + 
"acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981747, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981747 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265015, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265015 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2011173184357542, + "acc_stderr": 0.013405946402609045, + "acc_norm": 0.2011173184357542, + "acc_norm_stderr": 0.013405946402609045 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5274261603375527, + "acc_stderr": 0.03249822718301304, + "acc_norm": 0.5274261603375527, + "acc_norm_stderr": 0.03249822718301304 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32073011734028684, + "acc_stderr": 0.011921199991782625, + "acc_norm": 0.32073011734028684, + "acc_norm_stderr": 0.011921199991782625 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4582449322023691, + "mc2_stderr": 0.015573281761179949 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41912632821723733, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.4781582054309327, + "acc_norm_stderr": 0.017173944474294375 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihunKim/Mistral-7B-SlimOrca-orca-platy-out1kover", + "model_sha": "fcc2973dac87df41de97b6972e0323fee599bcf3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCKim/Mistral-7B-OpenHermes/result_2023-10-26 08:29:28.json b/MNCKim/Mistral-7B-OpenHermes/result_2023-10-26 08:29:28.json new file mode 100644 index 0000000000000000000000000000000000000000..2790f061466368e66dd82e297b1da9e3164bbe87 --- /dev/null +++ b/MNCKim/Mistral-7B-OpenHermes/result_2023-10-26 08:29:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34215017064846415, + "acc_stderr": 0.013864152159177278, + "acc_norm": 0.38822525597269625, + "acc_norm_stderr": 0.014241614207414044 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38020314678350925, + "acc_stderr": 0.004844445265582655, + "acc_norm": 0.4870543716391157, + "acc_norm_stderr": 0.004988108663179765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.01785298126663396, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.01785298126663396 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236785, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236785 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 
0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961827, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520196, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520196 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.040894654493255835, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.040894654493255835 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579861, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579861 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5137614678899083, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.5137614678899083, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949097, + 
"acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949097 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.01994491413687358, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.01994491413687358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115886, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20446927374301677, + "acc_stderr": 0.013488813404711909, + "acc_norm": 0.20446927374301677, + "acc_norm_stderr": 0.013488813404711909 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824873, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824873 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3559322033898305, + "acc_stderr": 0.01222864553727757, + "acc_norm": 0.3559322033898305, + "acc_norm_stderr": 0.01222864553727757 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.016238065069059615, + "mc2": 0.49276821876862364, + "mc2_stderr": 0.015815875390844718 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44037780401416765, + "acc_stderr": 0.01706769977431297, + 
"acc_norm": 0.4734356552538371, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCKim/Mistral-7B-OpenHermes", + "model_sha": "847254b43b055cbe217b7aedf1219942457aa942", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran2k/result_2023-10-26 05:19:08.json b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran2k/result_2023-10-26 05:19:08.json new file mode 100644 index 0000000000000000000000000000000000000000..5f474b55d206213beb022c98ab6b1d2c6801d929 --- /dev/null +++ b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran2k/result_2023-10-26 05:19:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.3037542662116041, + "acc_stderr": 0.013438909184778762, + "acc_norm": 0.34044368600682595, + "acc_norm_stderr": 0.013847460518892976 + }, + "harness|ko_hellaswag|10": { + "acc": 0.359788886675961, + "acc_stderr": 0.0047895751634186535, + "acc_norm": 0.45180242979486157, + "acc_norm_stderr": 0.004966544724452225 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4099616858237548, + "acc_stderr": 0.017587672312336055, + "acc_norm": 0.4099616858237548, + "acc_norm_stderr": 0.017587672312336055 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.041716541613545426, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.041716541613545426 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3536977491961415, + "acc_stderr": 0.02715520810320086, + "acc_norm": 0.3536977491961415, + "acc_norm_stderr": 0.02715520810320086 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.03446897738659332, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.03446897738659332 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.040824829046386284, + "acc_norm": 0.4, + "acc_norm_stderr": 0.040824829046386284 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931673, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931673 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.024283140529467298, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.024283140529467298 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + 
"acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.0333276906841079, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.0333276906841079 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.027273890594300642, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.027273890594300642 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.38113207547169814, + "acc_stderr": 0.029890609686286616, + "acc_norm": 0.38113207547169814, + "acc_norm_stderr": 0.029890609686286616 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.40298507462686567, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.40298507462686567, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101817, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101817 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.025624723994030454, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.025624723994030454 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.37305699481865284, + 
"acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3522935779816514, + "acc_stderr": 0.020480568843998993, + "acc_norm": 0.3522935779816514, + "acc_norm_stderr": 0.020480568843998993 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.0273053080762747, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.0273053080762747 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093092, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093092 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053479, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053479 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.0305467452649532, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.0305467452649532 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497724, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497724 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.026431329870789524, + "acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.026431329870789524 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.028263889943784617, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.028263889943784617 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.42616033755274263, + "acc_stderr": 0.032190357031317736, + "acc_norm": 0.42616033755274263, + "acc_norm_stderr": 0.032190357031317736 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2848761408083442, + "acc_stderr": 0.011527830846369038, + "acc_norm": 0.2848761408083442, + "acc_norm_stderr": 0.011527830846369038 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674119, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 
0.03384132045674119 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4567888942781546, + "mc2_stderr": 0.015721003734360934 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3400236127508855, + "acc_stderr": 0.01628671722073768, + "acc_norm": 0.41912632821723733, + "acc_norm_stderr": 0.016963995010862792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran2k", + "model_sha": "b7d5d28670cc0536eff52f462ec04de3712fd4e9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran4k/result_2023-10-26 05:19:05.json b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran4k/result_2023-10-26 05:19:05.json new file mode 100644 index 0000000000000000000000000000000000000000..8cb5674e3fdcf780de3a918c79ad092fc8fa4240 --- /dev/null +++ b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran4k/result_2023-10-26 05:19:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2977815699658703, + "acc_stderr": 0.013363080107244492, + "acc_norm": 0.3395904436860068, + "acc_norm_stderr": 0.013839039762820164 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3595897231627166, + "acc_stderr": 0.004788994060654273, + "acc_norm": 0.4554869547898825, + "acc_norm_stderr": 0.004969968458256169 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457921, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457921 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.04656147110012351, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.04656147110012351 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34227330779054915, + "acc_stderr": 0.016967031766413624, + "acc_norm": 0.34227330779054915, + "acc_norm_stderr": 0.016967031766413624 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552004, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818788, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818788 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847836, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847836 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380565, + "acc_norm": 
0.24369747899159663, + "acc_norm_stderr": 0.027886828078380565 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.031618563353586086, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.031618563353586086 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27741935483870966, + "acc_stderr": 0.025470196835900055, + "acc_norm": 0.27741935483870966, + "acc_norm_stderr": 0.025470196835900055 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36752136752136755, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.36752136752136755, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493868, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493868 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.046313813194254635, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.046313813194254635 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712156, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712156 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008937, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008937 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.30845771144278605, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.30845771144278605, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194974, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194974 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.024685316867257803, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.024685316867257803 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + 
"acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294674, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294674 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27706422018348625, + "acc_stderr": 0.019188482590169535, + "acc_norm": 0.27706422018348625, + "acc_norm_stderr": 0.019188482590169535 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523809, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523809 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.025553169991826514, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.025553169991826514 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882925, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.01846315413263282, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.01846315413263282 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.17592592592592593, + "acc_stderr": 0.02596742095825853, + "acc_norm": 0.17592592592592593, + "acc_norm_stderr": 0.02596742095825853 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925293, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925293 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21323529411764705, + "acc_stderr": 0.024880971512294264, + "acc_norm": 0.21323529411764705, + "acc_norm_stderr": 0.024880971512294264 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + 
"acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3080168776371308, + "acc_stderr": 0.0300523893356057, + "acc_norm": 0.3080168776371308, + "acc_norm_stderr": 0.0300523893356057 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23859191655801826, + "acc_stderr": 0.010885929742002209, + "acc_norm": 0.23859191655801826, + "acc_norm_stderr": 0.010885929742002209 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501954, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501954 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.45719878783289014, + "mc2_stderr": 0.01579045306232963 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3010625737898465, + "acc_stderr": 0.015771113299945457, + "acc_norm": 0.38488783943329397, + "acc_norm_stderr": 0.01672857970149866 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran4k", + "model_sha": "397f2df4c4563a7b94ab4c30493004f89edf5eec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCKim/Mistral-7B-SlimOrca-OP-U2048-top2k/result_2023-10-26 05:18:54.json b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-top2k/result_2023-10-26 05:18:54.json new file mode 100644 index 0000000000000000000000000000000000000000..31822a22868abc1055aa6d1ef3358eaf987e6423 --- /dev/null +++ b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-top2k/result_2023-10-26 05:18:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3037542662116041, + "acc_stderr": 0.013438909184778757, + "acc_norm": 0.35580204778157, + "acc_norm_stderr": 0.01399057113791876 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3489344752041426, + "acc_stderr": 0.004756590961576588, + "acc_norm": 0.4340768771161123, + "acc_norm_stderr": 0.0049462215121452826 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44061302681992337, + "acc_stderr": 0.017753396973908486, + "acc_norm": 0.44061302681992337, + "acc_norm_stderr": 0.017753396973908486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.03191100192835794, + "acc_norm": 0.3452914798206278, + "acc_norm_stderr": 0.03191100192835794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, 
+ "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793254, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793254 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.0281291127091659, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.0281291127091659 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.03098029699261855, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.03098029699261855 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342668, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342668 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 
0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361816, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361816 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579859, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579859 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.019117213911495144, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.019117213911495144 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.02889395541211589, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.02889395541211589 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.045723723587374296, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.045723723587374296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.01448750085285041, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.01448750085285041 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 
0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254177, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254177 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.0320068202016391, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.0320068202016391 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.46835443037974683, + "acc_stderr": 0.03248197400511075, + "acc_norm": 0.46835443037974683, + "acc_norm_stderr": 0.03248197400511075 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30247718383311606, + "acc_stderr": 0.011731524234165704, + "acc_norm": 0.30247718383311606, + "acc_norm_stderr": 0.011731524234165704 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.03402272044340703, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.03402272044340703 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879076, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879076 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31456548347613217, + "mc1_stderr": 0.016255241993179195, + "mc2": 0.4866055692949919, + "mc2_stderr": 0.015740372637770925 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3234946871310508, + "acc_stderr": 0.016083627290483668, + "acc_norm": 0.36481700118063753, + "acc_norm_stderr": 0.016550144337046588 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 
1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCKim/Mistral-7B-SlimOrca-OP-U2048-top2k", + "model_sha": "9ea446751434a20492fc12f4843c9cdc8d8084b8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCKim/Mistral-7B-SlimOrca-OP-U2048-top4k/result_2023-10-26 05:18:57.json b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-top4k/result_2023-10-26 05:18:57.json new file mode 100644 index 0000000000000000000000000000000000000000..6ae7c519d2bfa2a131e8b5170dff1686db389f63 --- /dev/null +++ b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-top4k/result_2023-10-26 05:18:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2986348122866894, + "acc_stderr": 0.013374078615068757, + "acc_norm": 0.34812286689419797, + "acc_norm_stderr": 0.013921008595179344 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3474407488548098, + "acc_stderr": 0.004751840646730853, + "acc_norm": 0.4311890061740689, + "acc_norm_stderr": 0.004942302768002104 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41890166028097064, + "acc_stderr": 0.01764320505237717, + "acc_norm": 0.41890166028097064, + "acc_norm_stderr": 0.01764320505237717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.02804339985821063, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.02804339985821063 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 
0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.024939313906940777, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.024939313906940777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4064516129032258, + "acc_stderr": 0.027941727346256308, + "acc_norm": 0.4064516129032258, + "acc_norm_stderr": 0.027941727346256308 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342582, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342582 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 
0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028428, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028428 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261736, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261736 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379417, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379417 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.019333142020797056, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.019333142020797056 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320196, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320196 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 
0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647206, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647206 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.39, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48523206751054854, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.48523206751054854, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.011901895635786095, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.011901895635786095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321322992, + "mc2": 0.4763580752793618, + "mc2_stderr": 0.01591246406391595 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3435655253837072, + "acc_stderr": 0.016327334806429145, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191392 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCKim/Mistral-7B-SlimOrca-OP-U2048-top4k", + "model_sha": "0df21efbb44a7aeac958f99c94d27887bdeb7e04", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-KoCot-Platypus-4096/result_2023-10-24 10:48:31.json b/MNCLLM/Mistral-7B-KoCot-Platypus-4096/result_2023-10-24 10:48:31.json new file mode 100644 index 0000000000000000000000000000000000000000..0a9411d46dc2e719f84a7907695d6203fc01865a --- /dev/null +++ b/MNCLLM/Mistral-7B-KoCot-Platypus-4096/result_2023-10-24 10:48:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28668941979522183, + "acc_stderr": 0.01321498632927477, + "acc_norm": 0.3387372013651877, + "acc_norm_stderr": 0.01383056892797433 + }, + "harness|ko_hellaswag|10": { + "acc": 0.344353714399522, + "acc_stderr": 0.004741859753178415, + "acc_norm": 0.4213304122684724, + "acc_norm_stderr": 0.004927631806477553 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3946360153256705, + "acc_stderr": 0.017478464305911542, + "acc_norm": 0.3946360153256705, + "acc_norm_stderr": 0.017478464305911542 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 
0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40514469453376206, + "acc_stderr": 0.027882383791325946, + "acc_norm": 0.40514469453376206, + "acc_norm_stderr": 0.027882383791325946 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289202, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289202 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768362, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768362 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185554, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185554 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931673, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931673 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.024121125416941183, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.024121125416941183 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.027430866579973474, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.027430866579973474 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5683760683760684, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.5683760683760684, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493854, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493854 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228416, + "acc_norm": 0.3, + "acc_norm_stderr": 
0.027940457136228416 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008937, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008937 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43283582089552236, + "acc_stderr": 0.0350349092367328, + "acc_norm": 0.43283582089552236, + "acc_norm_stderr": 0.0350349092367328 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.0236369759961018, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.0236369759961018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3786127167630058, + "acc_stderr": 0.026113749361310338, + "acc_norm": 0.3786127167630058, + "acc_norm_stderr": 0.026113749361310338 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33641975308641975, + "acc_stderr": 0.026289734945952926, + "acc_norm": 0.33641975308641975, + "acc_norm_stderr": 0.026289734945952926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.034234651001042816, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.034234651001042816 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3871559633027523, + "acc_stderr": 0.020884231992643453, + "acc_norm": 0.3871559633027523, + "acc_norm_stderr": 0.020884231992643453 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.027475969910660952, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.027475969910660952 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.32516339869281047, + "acc_stderr": 0.018950886770806308, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.018950886770806308 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2659217877094972, + "acc_stderr": 0.014776765066438888, + "acc_norm": 0.2659217877094972, + "acc_norm_stderr": 0.014776765066438888 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2977941176470588, + "acc_stderr": 0.027778298701545436, + "acc_norm": 0.2977941176470588, + "acc_norm_stderr": 0.027778298701545436 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 0.03055531675557364, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.03055531675557364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4008438818565401, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.4008438818565401, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.01196531153657153, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.01196531153657153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.01572313952460875, + "mc2": 0.44624551916312966, + "mc2_stderr": 0.015796983100879885 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.15820543093270367, + "acc_stderr": 0.012546672797728753, + "acc_norm": 0.179456906729634, + "acc_norm_stderr": 0.013193062031400433 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCLLM/Mistral-7B-KoCot-Platypus-4096", + "model_sha": "bbb51b457200947001a0dc6e318a7d2d7e717197", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-OP-over1k-grad0.3/result_2023-10-25 09:13:09.json b/MNCLLM/Mistral-7B-OP-over1k-grad0.3/result_2023-10-25 09:13:09.json new file mode 100644 index 0000000000000000000000000000000000000000..8442cf12fd8b00881c4e515eeac1338894a8c2b1 --- /dev/null +++ b/MNCLLM/Mistral-7B-OP-over1k-grad0.3/result_2023-10-25 09:13:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3651877133105802, + "acc_stderr": 0.014070265519268802, + "acc_norm": 0.4104095563139932, + "acc_norm_stderr": 0.014374922192642662 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38309101772555265, + "acc_stderr": 0.004851466623601446, + "acc_norm": 0.4949213304122685, + "acc_norm_stderr": 0.0049895240030924425 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041982, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033582, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033582 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734026, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734026 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.030656748696739428, + 
"acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.030656748696739428 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066475, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066475 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756653, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + 
"acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2100558659217877, + "acc_stderr": 0.013623755371333519, + "acc_norm": 0.2100558659217877, + "acc_norm_stderr": 0.013623755371333519 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763126, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763126 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.03172295004332331, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.03172295004332331 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.011901895635786088, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.011901895635786088 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777305, + "mc2": 0.4637619506541597, + "mc2_stderr": 0.015446438806039912 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45808736717827625, + "acc_stderr": 0.01712985211791114, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.017185069732676528 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCLLM/Mistral-7B-OP-over1k-grad0.3", + "model_sha": "4053a441cc7724e204d047f88c2b1646a1d6aad2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-OP-over1k-grad1.0/result_2023-10-25 09:13:00.json b/MNCLLM/Mistral-7B-OP-over1k-grad1.0/result_2023-10-25 09:13:00.json new file mode 100644 index 0000000000000000000000000000000000000000..4dce8f6eca5f1cad8d8b042f6bad95a8d51298f1 --- /dev/null +++ b/MNCLLM/Mistral-7B-OP-over1k-grad1.0/result_2023-10-25 09:13:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038076, + "acc_norm": 0.41723549488054607, + "acc_norm_stderr": 0.014409825518403084 + }, + "harness|ko_hellaswag|10": { + "acc": 0.386476797450707, + "acc_stderr": 0.00485946798415526, + "acc_norm": 0.4965146385182235, + "acc_norm_stderr": 0.00498966018079217 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + 
"acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.02529460802398648, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.02529460802398648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + 
"acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.021414757058175506, + "acc_norm": 0.47706422018348627, + 
"acc_norm_stderr": 0.021414757058175506 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094593, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094593 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849646, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849646 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384308, + "mc2": 
0.4667125764870672, + "mc2_stderr": 0.015432249803510123 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4722550177095632, + "acc_stderr": 0.01716386797945601, + "acc_norm": 0.5277449822904369, + "acc_norm_stderr": 0.017163867979456016 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCLLM/Mistral-7B-OP-over1k-grad1.0", + "model_sha": "b03dd11e5e2e64d2c59bf37ab513947869606609", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-OP-over500-grad1.0/result_2023-10-25 09:14:48.json b/MNCLLM/Mistral-7B-OP-over500-grad1.0/result_2023-10-25 09:14:48.json new file mode 100644 index 0000000000000000000000000000000000000000..6367399fd75b13c15ed399c2a4cdbabab27a6000 --- /dev/null +++ 
b/MNCLLM/Mistral-7B-OP-over500-grad1.0/result_2023-10-25 09:14:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142817, + "acc_norm": 0.4283276450511945, + "acc_norm_stderr": 0.014460496367599022 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37950607448715395, + "acc_stderr": 0.004842723234022034, + "acc_norm": 0.481876120294762, + "acc_norm_stderr": 0.00498650229693118 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.017870847506081717, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.017870847506081717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.027982680459759556, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.027982680459759556 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.024915243985987837, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.024915243985987837 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + 
"acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.030351527323344944, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344944 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.030656748696739428, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.030656748696739428 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416908, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416908 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666633, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666633 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.0275860062216077, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.0275860062216077 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45321100917431195, + "acc_stderr": 0.021343255165546034, + "acc_norm": 0.45321100917431195, + "acc_norm_stderr": 0.021343255165546034 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.019886221037501872, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.019886221037501872 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828978, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828978 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898435, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898435 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 0.03055531675557364, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.03055531675557364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32073011734028684, + "acc_stderr": 0.011921199991782629, + "acc_norm": 0.32073011734028684, + "acc_norm_stderr": 
0.011921199991782629 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.01570210709062788, + "mc2": 0.46295306302174644, + "mc2_stderr": 0.015320970978421385 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.01690006287942712, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.017175671279836442 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCLLM/Mistral-7B-OP-over500-grad1.0", + 
"model_sha": "f7789c5af9b3b166070a886207090228deccf9d6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-orca-platy-over1k/result_2023-10-25 06:43:45.json b/MNCLLM/Mistral-7B-orca-platy-over1k/result_2023-10-25 06:43:45.json new file mode 100644 index 0000000000000000000000000000000000000000..55c19cd09090d3dddcb47da2f37cb10f8544237a --- /dev/null +++ b/MNCLLM/Mistral-7B-orca-platy-over1k/result_2023-10-25 06:43:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038076, + "acc_norm": 0.41723549488054607, + "acc_norm_stderr": 0.014409825518403084 + }, + "harness|ko_hellaswag|10": { + "acc": 0.386476797450707, + "acc_stderr": 0.00485946798415526, + "acc_norm": 0.4965146385182235, + "acc_norm_stderr": 0.00498966018079217 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + "acc_stderr": 0.017841995750520857, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.017841995750520857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 
0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.02529460802398648, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.02529460802398648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.021414757058175506, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.021414757058175506 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094593, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094593 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849646, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849646 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384308, + "mc2": 0.4667008752277657, + "mc2_stderr": 0.015432114393165898 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4722550177095632, + "acc_stderr": 0.01716386797945601, + "acc_norm": 0.5277449822904369, + "acc_norm_stderr": 0.017163867979456016 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCLLM/Mistral-7B-orca-platy-over1k", + "model_sha": "65fda49b7459f17a98b8d1c5136001698f647919", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MRAIRR/MRAI_synatra_7B_v1/result_2023-11-24 16:23:24.json b/MRAIRR/MRAI_synatra_7B_v1/result_2023-11-24 16:23:24.json new file mode 100644 index 0000000000000000000000000000000000000000..e0f0e2e5c4709f13f564565fb92d37ad2c318e5b --- /dev/null +++ b/MRAIRR/MRAI_synatra_7B_v1/result_2023-11-24 16:23:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.4377133105802048, + "acc_norm_stderr": 0.014497573881108282 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3868751244771958, + "acc_stderr": 0.004860393011974675, + "acc_norm": 0.4931288587930691, + "acc_norm_stderr": 0.00498931022827612 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 0.01784491809046855, + "acc_norm": 0.5312899106002554, + "acc_norm_stderr": 0.01784491809046855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + 
"acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.02493931390694077, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.02493931390694077 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.02818173972001942, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.02818173972001942 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524582, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524582 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.023973861998992072, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.023973861998992072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.02680372058320619, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.02680372058320619 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422708, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44220183486238535, + "acc_stderr": 0.021293613207520205, + "acc_norm": 0.44220183486238535, + "acc_norm_stderr": 0.021293613207520205 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528784, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528784 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849726, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.019524316744866342, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.019524316744866342 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.02755336616510137, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.02755336616510137 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553996, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553996 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + 
"acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.02866199620233531, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.02866199620233531 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.03210353032241268, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.03210353032241268 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.011983819806464754, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464754 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.45897203021636795, + "mc2_stderr": 0.015978279165358995 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41440377804014167, + "acc_stderr": 0.016936583383943642, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.0170725258755631 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MRAIRR/MRAI_synatra_7B_v1", + "model_sha": "2232a0c5aaffdf526fffd3516ff28b7bf6679378", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MRAIRR/Navistral/result_2023-11-06 10:51:25.json b/MRAIRR/Navistral/result_2023-11-06 10:51:25.json new file mode 100644 index 0000000000000000000000000000000000000000..0cb8e571b7ad478d73826fb64b17d2837b502bea --- /dev/null +++ b/MRAIRR/Navistral/result_2023-11-06 10:51:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2721843003412969, + "acc_stderr": 0.013006600406423707, + "acc_norm": 0.31143344709897613, + "acc_norm_stderr": 0.013532472099850942 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3149770961959769, + "acc_stderr": 0.004635574339176323, + "acc_norm": 0.382194781915953, + "acc_norm_stderr": 0.004849306998727776 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4125159642401022, + "acc_stderr": 0.017604149108671936, + "acc_norm": 0.4125159642401022, + "acc_norm_stderr": 0.017604149108671936 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.3632286995515695, + 
"acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461227, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419871, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419871 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38064516129032255, + "acc_stderr": 0.02762171783290704, + "acc_norm": 0.38064516129032255, + "acc_norm_stderr": 0.02762171783290704 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.03050329201334259, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.03050329201334259 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 
+ }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983056, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142263, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142263 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38580246913580246, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.38580246913580246, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39378238341968913, + "acc_stderr": 0.03526077095548237, + "acc_norm": 0.39378238341968913, + "acc_norm_stderr": 0.03526077095548237 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3779816513761468, + "acc_stderr": 0.020789187066728117, + "acc_norm": 0.3779816513761468, + "acc_norm_stderr": 0.020789187066728117 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.037827289808654706, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.037827289808654706 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724553, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": 
{ + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963745, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.015131608849963745 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714864, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669276, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669276 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31681877444589307, + "acc_stderr": 0.011882349954723008, + "acc_norm": 0.31681877444589307, + "acc_norm_stderr": 0.011882349954723008 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.03343311240488419, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.03343311240488419 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.0372820699868265, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.0372820699868265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4540203721938441, + "mc2_stderr": 0.015668476056429896 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3990554899645809, + "acc_stderr": 0.016836377292849296, + "acc_norm": 0.4427390791027155, + "acc_norm_stderr": 0.017077254131556217 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MRAIRR/Navistral", + "model_sha": "591fda7ce94712932e454509cf3ea4c24d9dd619", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MRAIRR/Nextstage/result_2023-11-01 03:26:01.json b/MRAIRR/Nextstage/result_2023-11-01 03:26:01.json new file mode 100644 index 0000000000000000000000000000000000000000..363f76a4c65156bd9da909fea2db74503f09ca57 --- /dev/null +++ b/MRAIRR/Nextstage/result_2023-11-01 03:26:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2773037542662116, + "acc_stderr": 0.013082095839059374, + "acc_norm": 0.32593856655290104, + "acc_norm_stderr": 0.013697432466693246 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3405696076478789, + "acc_stderr": 0.004729322613301549, + "acc_norm": 0.4224258115913165, + "acc_norm_stderr": 0.004929361040558251 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44316730523627074, + "acc_stderr": 0.017764085035348386, + "acc_norm": 0.44316730523627074, + "acc_norm_stderr": 0.017764085035348386 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596239, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596239 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 
0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.028173917761762875, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.028173917761762875 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.46464646464646464, + "acc_stderr": 0.035534363688280626, + "acc_norm": 0.46464646464646464, + "acc_norm_stderr": 0.035534363688280626 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.025174048384000766, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.025174048384000766 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.027906150826041143, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.027906150826041143 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349465, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3849056603773585, + "acc_stderr": 0.029946498567699948, + "acc_norm": 0.3849056603773585, + "acc_norm_stderr": 0.029946498567699948 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670238, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670238 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606647, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606647 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 
0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206184, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206184 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38580246913580246, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.38580246913580246, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44403669724770645, + "acc_stderr": 0.021302621211654525, + "acc_norm": 0.44403669724770645, + "acc_norm_stderr": 0.021302621211654525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557836, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557836 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033522, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033522 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.018926082916083393, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.018926082916083393 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + 
"acc_stderr": 0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.01453033020146864, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.01453033020146864 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5021097046413502, + "acc_stderr": 0.03254693801802008, + "acc_norm": 0.5021097046413502, + "acc_norm_stderr": 0.03254693801802008 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.011717148751648431, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.011717148751648431 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.033744993563193555, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.033744993563193555 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834562, + "mc2": 0.42637566603576926, + "mc2_stderr": 0.015537081390223764 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.35537190082644626, + "acc_stderr": 0.01645549600031453, + "acc_norm": 0.4002361275088548, + "acc_norm_stderr": 0.016844693510505052 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MRAIRR/Nextstage", + "model_sha": "9457f0fd266dc20b3808e56fc81d9242d2a9486a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/DopeorNope-Maestro-v1-13B/result_2023-11-29 14:33:57.json b/MarkrAI/DopeorNope-Maestro-v1-13B/result_2023-11-29 14:33:57.json new file mode 100644 index 0000000000000000000000000000000000000000..18bbad2da3f7f0add930b9d6845c05edbeaa863f --- /dev/null +++ b/MarkrAI/DopeorNope-Maestro-v1-13B/result_2023-11-29 14:33:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.0144418896274644, + "acc_norm": 0.4761092150170648, + "acc_norm_stderr": 0.014594701798071655 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4298944433379805, + "acc_stderr": 0.00494049050824065, + "acc_norm": 0.5765783708424617, + "acc_norm_stderr": 0.00493091151508479 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.01775880053421441, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.01775880053421441 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + 
"acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836918, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836918 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245403, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245403 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389188, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389188 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 
0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230182, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230182 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523846, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523846 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.02082814851702261, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.02082814851702261 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 
+ }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.03000856284500347, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.03000856284500347 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3709256844850065, + "acc_stderr": 0.012337391684530312, + "acc_norm": 0.3709256844850065, + "acc_norm_stderr": 0.012337391684530312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882466, + "mc2": 0.46630606087545945, + "mc2_stderr": 0.015285468253345829 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4651711924439197, + "acc_stderr": 0.017148598015747422, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/DopeorNope-Maestro-v1-13B", + "model_sha": "c68224b35bf9eb9c2f55270489870f5a6451871f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/DopeorNope-maestro-v1-DPO-13b/result_2023-11-29 17:12:08.json b/MarkrAI/DopeorNope-maestro-v1-DPO-13b/result_2023-11-29 17:12:08.json new file mode 100644 index 0000000000000000000000000000000000000000..9c97b67b7da0d3e6b1f0994c8af5cd7d072f1c13 --- /dev/null +++ b/MarkrAI/DopeorNope-maestro-v1-DPO-13b/result_2023-11-29 17:12:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.0144418896274644, + "acc_norm": 0.47696245733788395, + "acc_norm_stderr": 0.014595873205358273 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4298944433379805, + "acc_stderr": 0.00494049050824065, + "acc_norm": 0.5764787890858395, + "acc_norm_stderr": 0.0049310654341736876 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 
0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.01775880053421441, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.01775880053421441 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.02834378725054064, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.02834378725054064 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245403, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245403 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389188, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389188 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523846, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523846 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.02082814851702261, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.02082814851702261 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.03000856284500347, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.03000856284500347 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37027379400260757, + "acc_stderr": 0.012332930781256723, + "acc_norm": 0.37027379400260757, + "acc_norm_stderr": 0.012332930781256723 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882466, + "mc2": 0.4663124402531116, + "mc2_stderr": 0.015286600923431525 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4651711924439197, + "acc_stderr": 0.017148598015747422, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/DopeorNope-maestro-v1-DPO-13b", + "model_sha": "c7a7352cb9fa7e2f84385eef0d84de34a0417129", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/DopeorNope-maestro-v1.1-DPO-13b/result_2023-11-29 22:34:39.json b/MarkrAI/DopeorNope-maestro-v1.1-DPO-13b/result_2023-11-29 22:34:39.json new file mode 100644 index 0000000000000000000000000000000000000000..8b0a1dce04f3489c631de6cb3c518bcab21abb75 --- /dev/null +++ b/MarkrAI/DopeorNope-maestro-v1.1-DPO-13b/result_2023-11-29 22:34:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.0144418896274644, + "acc_norm": 0.4761092150170648, + "acc_norm_stderr": 
0.014594701798071655 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4298944433379805, + "acc_stderr": 0.00494049050824065, + "acc_norm": 0.5763792073292173, + "acc_norm_stderr": 0.004931219148182245 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.01775880053421441, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.01775880053421441 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986483, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + 
"acc_stderr": 0.04820403072760626, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836918, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836918 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245403, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245403 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523846, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523846 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 
0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.02082814851702261, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.02082814851702261 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.03000856284500347, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.03000856284500347 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36897001303780963, + "acc_stderr": 0.012323936650174859, + "acc_norm": 0.36897001303780963, + "acc_norm_stderr": 0.012323936650174859 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 
0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882466, + "mc2": 0.46627654282840275, + "mc2_stderr": 0.015286096744214328 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4651711924439197, + "acc_stderr": 0.017148598015747422, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/DopeorNope-maestro-v1.1-DPO-13b", + "model_sha": "8dc70bf0ccd7914ca6ebbe2e661f783e69172b95", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/DopeorNope-maestro-v2-DPO-13b/result_2023-11-30 
07:24:14.json b/MarkrAI/DopeorNope-maestro-v2-DPO-13b/result_2023-11-30 07:24:14.json new file mode 100644 index 0000000000000000000000000000000000000000..b1113b6bee8734dab7c3a0f960c03bd48308f70b --- /dev/null +++ b/MarkrAI/DopeorNope-maestro-v2-DPO-13b/result_2023-11-30 07:24:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910467, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955264 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42103166699860584, + "acc_stderr": 0.00492715588259819, + "acc_norm": 0.5669189404501095, + "acc_norm_stderr": 0.004944889545497954 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5236270753512133, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.5236270753512133, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 
0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736413, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736413 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.02286083830923207, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.02286083830923207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422704, + "acc_norm": 0.44753086419753085, + 
"acc_norm_stderr": 0.027667138569422704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.01961085147488029, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.01961085147488029 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079105, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.02896370257079105 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + 
"acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2926988265971317, + "acc_stderr": 0.011620949195849531, + "acc_norm": 0.2926988265971317, + "acc_norm_stderr": 0.011620949195849531 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.01557284045287583, + "mc2": 0.42260797250546406, + "mc2_stderr": 0.014810055872742214 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.017154073716682868, + "acc_norm": 0.6162927981109799, + "acc_norm_stderr": 0.016718924637231826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/DopeorNope-maestro-v2-DPO-13b", + "model_sha": "e6aeb2886265d8bdbfda9460926c63b991694ce9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/DopeorNope-maestro-v3-DPO-13b/result_2023-11-30 08:14:03.json b/MarkrAI/DopeorNope-maestro-v3-DPO-13b/result_2023-11-30 08:14:03.json new file mode 100644 index 0000000000000000000000000000000000000000..9717e0963a8d938d5ce4b24a436b894926947893 --- /dev/null +++ b/MarkrAI/DopeorNope-maestro-v3-DPO-13b/result_2023-11-30 08:14:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3873720136518771, + "acc_stderr": 0.014235872487909865, + "acc_norm": 0.44368600682593856, + "acc_norm_stderr": 0.014518421825670447 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42093208524198367, + "acc_stderr": 0.004926996830194231, + "acc_norm": 0.5696076478789086, + "acc_norm_stderr": 0.0049411916073179105 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5440613026819924, + "acc_stderr": 0.01781040392543535, + "acc_norm": 0.5440613026819924, + "acc_norm_stderr": 0.01781040392543535 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977978, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977978 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866767, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866767 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + 
"acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.03158539157745637, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.03158539157745637 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.0305032920133426, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.0305032920133426 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.040894654493255835, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.040894654493255835 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.021311335009708575, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.021311335009708575 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.01969145905235415, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.01969145905235415 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150381, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150381 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159696, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159696 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.333116036505867, + "acc_stderr": 0.012037930451512052, + "acc_norm": 0.333116036505867, + "acc_norm_stderr": 0.012037930451512052 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237024, + "mc2": 0.4173314540045968, + "mc2_stderr": 0.014766350516789333 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727637, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 
1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/DopeorNope-maestro-v3-DPO-13b", + "model_sha": "2b13d8118774db16fd5c520866865674899f3240", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/DopeorNope-maestro-v4-DPO-13b/result_2023-11-30 12:53:00.json b/MarkrAI/DopeorNope-maestro-v4-DPO-13b/result_2023-11-30 12:53:00.json new file mode 100644 index 0000000000000000000000000000000000000000..b0a863d7da4647917440ad45337dbab29775a839 --- /dev/null +++ b/MarkrAI/DopeorNope-maestro-v4-DPO-13b/result_2023-11-30 12:53:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4035836177474403, + "acc_stderr": 0.014337158914268436, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007109 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4304919338777136, + "acc_stderr": 0.004941331215598551, + "acc_norm": 0.5734913363871739, + "acc_norm_stderr": 0.004935587729948866 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.02832032583010592, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.02832032583010592 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 
0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296546, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296546 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561953, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561953 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.03058805297427065, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.03058805297427065 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432564, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432564 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717861, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717861 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5431192660550459, + "acc_stderr": 0.02135745878522622, + "acc_norm": 0.5431192660550459, + "acc_norm_stderr": 0.02135745878522622 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468008, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468008 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215923, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215923 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.0318421386668758, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.0318421386668758 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3500651890482399, + "acc_stderr": 0.012182552313215179, + "acc_norm": 0.3500651890482399, + "acc_norm_stderr": 0.012182552313215179 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.01594506858123661, + "mc2": 0.46546644792049385, + "mc2_stderr": 0.015216431408315606 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.512396694214876, + "acc_stderr": 0.01718506973267653, + "acc_norm": 0.58913813459268, + "acc_norm_stderr": 0.01691497276784105 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, 
+ "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/DopeorNope-maestro-v4-DPO-13b", + "model_sha": "3e95480ac846fcbcfa758b1a1f3d54230a810c47", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/kyujin-CoTy-platypus-ko-12.8b/result_2023-10-03 18:47:45.json b/MarkrAI/kyujin-CoTy-platypus-ko-12.8b/result_2023-10-03 18:47:45.json new file mode 100644 index 0000000000000000000000000000000000000000..8825c9be8b614073fcfc50d046d956b63dec3468 --- /dev/null +++ b/MarkrAI/kyujin-CoTy-platypus-ko-12.8b/result_2023-10-03 18:47:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28668941979522183, + "acc_stderr": 0.013214986329274755, + "acc_norm": 0.34982935153583616, + "acc_norm_stderr": 0.013936809212158287 + }, + "harness|ko_hellaswag|10": { + "acc": 0.383788090021908, + "acc_stderr": 0.004853134271547759, + "acc_norm": 0.4911372236606254, + "acc_norm_stderr": 0.004988997467134492 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.01574549716904906, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.01574549716904906 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19574468085106383, + "acc_stderr": 0.025937853139977148, + "acc_norm": 0.19574468085106383, + "acc_norm_stderr": 0.025937853139977148 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789413, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789413 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 
0.026236965881153266, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.026236965881153266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239952, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241238, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 
0.03076944496729602, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729602 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577612, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577612 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25871559633027524, + "acc_stderr": 0.01877605231961962, + "acc_norm": 0.25871559633027524, + "acc_norm_stderr": 0.01877605231961962 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1349206349206349, + "acc_stderr": 0.030557101589417508, + "acc_norm": 0.1349206349206349, + "acc_norm_stderr": 0.030557101589417508 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.025261691219729484, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.025261691219729484 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.018152871051538812, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.018152871051538812 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 
0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859676, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859676 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859924, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859924 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23161764705882354, + "acc_stderr": 0.025626533803777562, + "acc_norm": 0.23161764705882354, + "acc_norm_stderr": 0.025626533803777562 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26792698826597133, + "acc_stderr": 0.011311347690633872, + "acc_norm": 0.26792698826597133, + "acc_norm_stderr": 0.011311347690633872 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869326 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2252141982864137, + "mc1_stderr": 0.014623240768023479, + "mc2": 0.3758708542635285, + "mc2_stderr": 0.014474804257008467 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2892561983471074, + "acc_stderr": 0.015588800386053557, + "acc_norm": 0.3152302243211334, + "acc_norm_stderr": 0.01597353492379448 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/kyujin-CoTy-platypus-ko-12.8b", + "model_sha": "f37be07af5a262fa9c37c33dcbff3b8702eac9a4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/kyujin-Poly-platypus-ko-12.8b/result_2023-10-02 06:47:06.json b/MarkrAI/kyujin-Poly-platypus-ko-12.8b/result_2023-10-02 06:47:06.json new file mode 100644 index 0000000000000000000000000000000000000000..15c303246ea168a0e80dd39d2bb16c65b0aea360 --- /dev/null +++ b/MarkrAI/kyujin-Poly-platypus-ko-12.8b/result_2023-10-02 06:47:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30887372013651876, + "acc_stderr": 0.013501770929344003, + "acc_norm": 0.3515358361774744, + "acc_norm_stderr": 0.013952413699600933 + }, + "harness|ko_hellaswag|10": { + "acc": 0.391256721768572, + "acc_stderr": 0.004870342592915049, + "acc_norm": 0.5038836885082653, + "acc_norm_stderr": 0.004989630887066195 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038245, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.034678266857038245 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.015745497169049057, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.015745497169049057 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + 
"acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838752, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838752 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596916, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596916 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026928, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026928 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899098, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899098 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + 
"acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.03096590312357301, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483098, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483098 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906865, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906865 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.02454761779480383, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.02454761779480383 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22752293577981653, + "acc_stderr": 0.0179744635787765, + "acc_norm": 0.22752293577981653, + "acc_norm_stderr": 0.0179744635787765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.025261691219729487, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.025261691219729487 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 
0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859676, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859676 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.022571771025494757, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.022571771025494757 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.0298180247497531, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.0298180247497531 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25945241199478486, + "acc_stderr": 0.011195262076350309, + "acc_norm": 0.25945241199478486, + "acc_norm_stderr": 0.011195262076350309 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624337, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624337 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23011015911872704, + "mc1_stderr": 0.01473455795980776, + "mc2": 0.38739814063055383, + "mc2_stderr": 0.01474443864761987 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091115, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.016272952997019124 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/kyujin-Poly-platypus-ko-12.8b", + "model_sha": "cc48d722e28e785ef32b05f4ef0246df177af942", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Megastudy/M-SOLAR-10.7B-v1.1-beta/result_2023-12-29 00:13:06.json b/Megastudy/M-SOLAR-10.7B-v1.1-beta/result_2023-12-29 00:13:06.json new file mode 100644 index 0000000000000000000000000000000000000000..6f36fe37f0dc2ffa7983f904302ef19807c51d22 --- /dev/null +++ b/Megastudy/M-SOLAR-10.7B-v1.1-beta/result_2023-12-29 00:13:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4590443686006826, + "acc_stderr": 0.014562291073601234, + "acc_norm": 0.5170648464163823, + "acc_norm_stderr": 0.014602878388536591 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4457279426409082, + "acc_stderr": 0.004960299952519394, + "acc_norm": 0.6086436964748058, + "acc_norm_stderr": 0.004870563921220625 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6491228070175439, + "acc_stderr": 0.03660298834049164, + "acc_norm": 0.6491228070175439, + "acc_norm_stderr": 0.03660298834049164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 
0.0458212416016155, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.669220945083014, + "acc_stderr": 0.01682481846256376, + "acc_norm": 0.669220945083014, + "acc_norm_stderr": 0.01682481846256376 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5852090032154341, + "acc_stderr": 0.027982680459759567, + "acc_norm": 0.5852090032154341, + "acc_norm_stderr": 0.027982680459759567 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262973, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262973 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646847, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646847 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5358974358974359, + "acc_stderr": 0.025285585990017862, + "acc_norm": 0.5358974358974359, + "acc_norm_stderr": 0.025285585990017862 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6258064516129033, + "acc_stderr": 0.027528904299845683, + "acc_norm": 0.6258064516129033, + "acc_norm_stderr": 
0.027528904299845683 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.02925290592725198, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.02925290592725198 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.02555992055053101, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.02555992055053101 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5625, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.5625, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5895953757225434, + "acc_stderr": 0.02648339204209818, + "acc_norm": 0.5895953757225434, + "acc_norm_stderr": 0.02648339204209818 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.027002521034516468, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.027002521034516468 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7202072538860104, + "acc_stderr": 0.03239637046735703, + "acc_norm": 0.7202072538860104, + "acc_norm_stderr": 0.03239637046735703 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7119266055045872, + "acc_stderr": 0.019416445892636025, + "acc_norm": 0.7119266055045872, + "acc_norm_stderr": 0.019416445892636025 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.02849199358617156, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.02849199358617156 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.03984979653302871, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.03984979653302871 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.02022394600507431, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02022394600507431 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.02942799403941999, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.02942799403941999 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.1877094972067039, + "acc_stderr": 0.013059605303257065, + "acc_norm": 0.1877094972067039, + "acc_norm_stderr": 0.013059605303257065 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5183823529411765, + "acc_stderr": 0.03035230339535196, + "acc_norm": 0.5183823529411765, + "acc_norm_stderr": 0.03035230339535196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 0.02747974455080851, + "acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.02747974455080851 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41851368970013036, + "acc_stderr": 0.01259950560833648, + "acc_norm": 0.41851368970013036, + "acc_norm_stderr": 0.01259950560833648 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.0161857443551449, + "mc2": 0.47119481512690015, + "mc2_stderr": 0.0153427666032473 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5749704840613932, + "acc_stderr": 0.016996016308362887, + "acc_norm": 0.6233766233766234, + "acc_norm_stderr": 0.016658799874051985 + } + }, + "versions": { + 
"all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Megastudy/M-SOLAR-10.7B-v1.1-beta", + "model_sha": "2bdd9a00217c3fdd9ec6f4d966e5383a529bd0f5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Merge_test01/result_2023-12-08 04:54:05.json b/Minirecord/Merge_test01/result_2023-12-08 04:54:05.json new file mode 100644 index 0000000000000000000000000000000000000000..61a098b55484a91e4c681bef87d1b77f9fded6d4 --- /dev/null +++ b/Minirecord/Merge_test01/result_2023-12-08 04:54:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42918088737201365, + "acc_stderr": 0.014464085894870651, + "acc_norm": 0.48208191126279865, + "acc_norm_stderr": 0.01460200558549098 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.41256721768571997, + "acc_stderr": 0.0049129004503708365, + "acc_norm": 0.5376419040031866, + "acc_norm_stderr": 0.004975621147406099 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5874840357598978, + "acc_stderr": 0.017604149108671925, + "acc_norm": 0.5874840357598978, + "acc_norm_stderr": 0.017604149108671925 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999998, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999998 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232963, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232963 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + "acc_stderr": 0.02534800603153475, + "acc_norm": 0.5076923076923077, + "acc_norm_stderr": 0.02534800603153475 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.0487831731214563, + "acc_norm": 0.62, + "acc_norm_stderr": 0.0487831731214563 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 
0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206184, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206184 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + 
"acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5596330275229358, + "acc_stderr": 0.021284310623761543, + "acc_norm": 0.5596330275229358, + "acc_norm_stderr": 0.021284310623761543 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.0439025926537756, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.0439025926537756 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.041733491480834974, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.041733491480834974 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.040633027314866704, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.040633027314866704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.01992211568278668, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.01992211568278668 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20670391061452514, + "acc_stderr": 0.013543260867834457, + "acc_norm": 0.20670391061452514, + "acc_norm_stderr": 0.013543260867834457 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125468, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125468 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.03078154910202621, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.03078154910202621 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.01222362336404404, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.01222362336404404 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 
0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3537331701346389, + "mc1_stderr": 0.01673781435884615, + "mc2": 0.5166103125164593, + "mc2_stderr": 0.015886971566784123 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46162927981109797, + "acc_stderr": 0.01713966022184556, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Merge_test01", + "model_sha": "b65c745bab164c7292374a9df9119c61117a382e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_DPO_test02/result_2023-11-30 05:44:03.json b/Minirecord/Mini_DPO_test02/result_2023-11-30 
05:44:03.json new file mode 100644 index 0000000000000000000000000000000000000000..ad61a762fa7777610f2e56de3b8c5d332424e488 --- /dev/null +++ b/Minirecord/Mini_DPO_test02/result_2023-11-30 05:44:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38993174061433444, + "acc_stderr": 0.01425295984889288, + "acc_norm": 0.447098976109215, + "acc_norm_stderr": 0.014529380160526838 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4025094602668791, + "acc_stderr": 0.0048940125556426375, + "acc_norm": 0.5183230432184823, + "acc_norm_stderr": 0.004986429808146767 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041982, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.03524068951567449, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.03524068951567449 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 
0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431183, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431183 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808086, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808086 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066482, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562417, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { 
+ "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354147, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354147 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534774, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534774 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025445, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303672, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303672 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254184, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254184 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + 
"acc_stderr": 0.01195284080964657, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.01195284080964657 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006518, + "mc2": 0.41568147235613934, + "mc2_stderr": 0.016165339412432606 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998567, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 
1 + }, + "config_general": { + "model_name": "Minirecord/Mini_DPO_test02", + "model_sha": "cd417467644c4178100083e342bad88a3f968be6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_DPO_test_01/result_2023-11-29 11:24:57.json b/Minirecord/Mini_DPO_test_01/result_2023-11-29 11:24:57.json new file mode 100644 index 0000000000000000000000000000000000000000..d079b7359502396642ecf95585c391eea0401ae1 --- /dev/null +++ b/Minirecord/Mini_DPO_test_01/result_2023-11-29 11:24:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44197952218430037, + "acc_stderr": 0.014512682523128343, + "acc_norm": 0.48293515358361777, + "acc_norm_stderr": 0.014602878388536595 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4159529974108743, + "acc_stderr": 0.004918781662373929, + "acc_norm": 0.5468034256124278, + "acc_norm_stderr": 0.00496787247538328 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.03815827365913237, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.03815827365913237 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5759897828863346, + "acc_stderr": 0.01767226332908422, + "acc_norm": 0.5759897828863346, + "acc_norm_stderr": 0.01767226332908422 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.032321469162244675, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.032321469162244675 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.041379310344827586, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.041379310344827586 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171451, + "acc_norm": 
0.23529411764705882, + "acc_norm_stderr": 0.04220773659171451 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534805, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534805 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883233, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883233 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.02920254015343118, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.02920254015343118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6616915422885572, + "acc_stderr": 0.033455630703391914, + "acc_norm": 0.6616915422885572, + "acc_norm_stderr": 0.033455630703391914 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137282, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137282 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670788, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670788 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.026720034380514998, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { 
+ "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5394495412844037, + "acc_stderr": 0.021370494609995103, + "acc_norm": 0.5394495412844037, + "acc_norm_stderr": 0.021370494609995103 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556054, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556054 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.040675331363091746, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.040675331363091746 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061177, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503803, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503803 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19664804469273742, + "acc_stderr": 0.01329318302745465, + "acc_norm": 0.19664804469273742, + "acc_norm_stderr": 0.01329318302745465 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406794, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406794 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.03038193194999042, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.03038193194999042 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897632, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897632 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386844, + "mc2": 0.47779817930728524, + "mc2_stderr": 0.015859968375893638 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5100354191263282, + "acc_stderr": 0.01718689128689406, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.017107618859549346 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Mini_DPO_test_01", + "model_sha": "ca901b8afa7f11d4cd192b3086eeaed0960edcd6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_Orca_daekeun_llama13b/result_2023-11-30 09:21:03.json b/Minirecord/Mini_Orca_daekeun_llama13b/result_2023-11-30 09:21:03.json new file mode 100644 index 0000000000000000000000000000000000000000..3c2cfb4b89059250e4f4fc9ee9bc17abb0fcdea3 --- /dev/null +++ b/Minirecord/Mini_Orca_daekeun_llama13b/result_2023-11-30 09:21:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4035836177474403, + "acc_stderr": 0.014337158914268436, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007109 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43019318860784705, + "acc_stderr": 0.004940911779273365, + "acc_norm": 0.5732921728739295, + "acc_norm_stderr": 0.004935882666250484 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633938, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633938 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.02832032583010592, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.02832032583010592 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, 
+ "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296546, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296546 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561953, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561953 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432564, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432564 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 
0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881695, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290313, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.027870745278290313 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.019542101564854107, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.019542101564854107 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + 
"acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741518, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.01594506858123661, + "mc2": 0.46568684481633393, + "mc2_stderr": 0.015215490629764046 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.512396694214876, + "acc_stderr": 0.01718506973267653, + "acc_norm": 0.5903187721369539, + "acc_norm_stderr": 0.01690756819221948 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Mini_Orca_daekeun_llama13b", + "model_sha": "6b20fd25f7afade5959ab4529b151eac8fba7798", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_llama13b_test123/result_2023-12-12 09:38:52.json b/Minirecord/Mini_llama13b_test123/result_2023-12-12 09:38:52.json new file mode 100644 index 0000000000000000000000000000000000000000..a033c6fc45a160caef940060844d809e1a94a741 --- /dev/null +++ b/Minirecord/Mini_llama13b_test123/result_2023-12-12 09:38:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.014383915302225398, + "acc_norm": 0.48208191126279865, + "acc_norm_stderr": 0.014602005585490982 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44064927305317664, + "acc_stderr": 0.004954503606471607, + "acc_norm": 0.5898227444732125, + "acc_norm_stderr": 0.004908604732082816 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.545338441890166, + "acc_stderr": 0.017806304585052595, + "acc_norm": 0.545338441890166, + "acc_norm_stderr": 0.017806304585052595 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.02832032583010592, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.02832032583010592 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234354, + "acc_norm": 
0.49327354260089684, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.025230381238934833, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.025230381238934833 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228426, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228426 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 
0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.021311335009708575, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.021311335009708575 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523812, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523812 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292535, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292535 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.019659922493623333, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.019659922493623333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103982, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103982 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687754, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687754 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330375, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330375 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.0159835951018114, + "mc2": 0.4673869792275578, + "mc2_stderr": 0.01544648001501871 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.01719024627623186, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Mini_llama13b_test123", + "model_sha": "8793a7abc5806d5796463aeed91a4b2406d7b9f2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_synatra_7b_01/result_2023-11-21 10:22:04.json b/Minirecord/Mini_synatra_7b_01/result_2023-11-21 10:22:04.json new file mode 100644 index 0000000000000000000000000000000000000000..a7f14c5a3db0f69e6a3d6ad87dcd1e388091c421 --- /dev/null +++ b/Minirecord/Mini_synatra_7b_01/result_2023-11-21 10:22:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.439419795221843, + "acc_stderr": 0.014503747823580127, + "acc_norm": 0.4778156996587031, + "acc_norm_stderr": 0.014597001927076136 + }, + "harness|ko_hellaswag|10": { + "acc": 0.407787293367855, + "acc_stderr": 0.004904189257891273, + "acc_norm": 0.5388368850826528, + "acc_norm_stderr": 0.004974706428434281 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.03786720706234214, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5721583652618135, + "acc_stderr": 0.017692787927803724, + "acc_norm": 0.5721583652618135, + "acc_norm_stderr": 0.017692787927803724 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056127, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056127 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006114, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006114 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.02535574126305527, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.02535574126305527 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5357798165137615, + "acc_stderr": 0.021382364775701906, + "acc_norm": 0.5357798165137615, + "acc_norm_stderr": 0.021382364775701906 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, 
+ "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21340782122905028, + "acc_stderr": 0.0137028599321961, + "acc_norm": 0.21340782122905028, + "acc_norm_stderr": 0.0137028599321961 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824873, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824873 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353592, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353592 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713606, + "mc2": 0.4699224627284617, + "mc2_stderr": 0.015489423242321306 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.01717567127983645, + "acc_norm": 0.5218417945690673, + "acc_norm_stderr": 0.017173944474294378 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Mini_synatra_7b_01", + "model_sha": "1851fd437f8956055d6dc61bb1e1628b9ffdbaed", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_synatra_7b_02/result_2023-11-22 01:45:18.json b/Minirecord/Mini_synatra_7b_02/result_2023-11-22 01:45:18.json new file mode 100644 index 0000000000000000000000000000000000000000..69694ab224552f66b8bab4eea9c7b4b106d89fc3 --- /dev/null +++ b/Minirecord/Mini_synatra_7b_02/result_2023-11-22 01:45:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4351535836177474, + "acc_stderr": 0.014487986197186047, + "acc_norm": 0.48464163822525597, + "acc_norm_stderr": 0.01460449612939491 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4062935670185222, + "acc_stderr": 0.0049013686295334155, + "acc_norm": 0.5382393945429197, + "acc_norm_stderr": 0.004975167382061836 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5721583652618135, + "acc_stderr": 0.017692787927803724, + "acc_norm": 0.5721583652618135, + "acc_norm_stderr": 0.017692787927803724 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + 
"acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.032278345101462685, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.032278345101462685 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.038743715565879536, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.038743715565879536 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49743589743589745, + "acc_stderr": 0.025350672979412205, + "acc_norm": 0.49743589743589745, + "acc_norm_stderr": 0.025350672979412205 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.029343114798094462, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.029343114798094462 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.026772990653361823, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.026772990653361823 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336936, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881695, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.02862930519400355, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.02862930519400355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829166, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829166 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19553072625698323, + "acc_stderr": 0.013264579220945082, + "acc_norm": 0.19553072625698323, + "acc_norm_stderr": 0.013264579220945082 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406794, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406794 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556166, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556166 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.030381931949990424, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.030381931949990424 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452224, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070262, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070262 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.01605899902610063, + "mc2": 0.47052567317541716, + "mc2_stderr": 0.015507149282229802 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.5324675324675324, + "acc_norm_stderr": 0.017154073716682865 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Mini_synatra_7b_02", + "model_sha": "cdea3a61219bec471946ed8330250119b6d45df0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_synatra_7b_03/result_2023-11-23 08:17:10.json b/Minirecord/Mini_synatra_7b_03/result_2023-11-23 08:17:10.json new file mode 100644 index 0000000000000000000000000000000000000000..861ba3792b7f9425b5f55a6bea87f063b431548d --- /dev/null +++ b/Minirecord/Mini_synatra_7b_03/result_2023-11-23 08:17:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4334470989761092, + "acc_stderr": 0.014481376224558896, + "acc_norm": 0.4684300341296928, + "acc_norm_stderr": 0.014582236460866977 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4069906393148775, + "acc_stderr": 0.004902690765066419, + "acc_norm": 0.5370444134634534, + "acc_norm_stderr": 0.004976067726432564 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 
+ }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.561941251596424, + "acc_stderr": 0.017742232238257254, + "acc_norm": 0.561941251596424, + "acc_norm_stderr": 0.017742232238257254 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.03446897738659333, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.03446897738659333 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + 
"acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606647, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606647 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02510742548113728, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02510742548113728 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.042857142857142816, + 
"acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.042857142857142816 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.18435754189944134, + "acc_stderr": 0.012969152811883447, + "acc_norm": 0.18435754189944134, + "acc_norm_stderr": 0.012969152811883447 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.035086373586305716, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.035086373586305716 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882473, + "mc2": 0.4780289931552814, + "mc2_stderr": 0.015543517557297408 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 
0.5407319952774499, + "acc_norm_stderr": 0.01713321827653767 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Mini_synatra_7b_03", + "model_sha": "6cd5c432930e923b0d73453fb22f817726da99bc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/llama13b_2s_dpo/result_2023-12-15 08:29:12.json b/Minirecord/llama13b_2s_dpo/result_2023-12-15 08:29:12.json new file mode 100644 index 0000000000000000000000000000000000000000..127546cd80ce6b9b890fa22ec698c2d37dca4ece --- /dev/null +++ b/Minirecord/llama13b_2s_dpo/result_2023-12-15 08:29:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4232081911262799, + "acc_stderr": 0.014438036220848029, + 
"acc_norm": 0.4812286689419795, + "acc_norm_stderr": 0.014601090150633964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42850029874526985, + "acc_stderr": 0.004938500303990289, + "acc_norm": 0.5737900816570405, + "acc_norm_stderr": 0.004935143791573811 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633938, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633938 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328167, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328167 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.02521731518484648, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.02521731518484648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036544, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036544 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228405, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228405 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261114, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261114 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.021233365030319567, + "acc_norm": 0.5688073394495413, + "acc_norm_stderr": 0.021233365030319567 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.02838425670488304, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.02838425670488304 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776122, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776122 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33572359843546284, + "acc_stderr": 0.012061304157664621, + "acc_norm": 0.33572359843546284, + "acc_norm_stderr": 0.012061304157664621 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 
0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31456548347613217, + "mc1_stderr": 0.016255241993179185, + "mc2": 0.48394875080657285, + "mc2_stderr": 0.015560465036731122 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077307, + "acc_norm": 0.615112160566706, + "acc_norm_stderr": 0.01672857970149866 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/llama13b_2s_dpo", + "model_sha": "feec8b40b923b8862a98a726d581770210316715", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/llama13b_dpo_loss0_OTL/result_2023-12-07 02:09:10.json 
b/Minirecord/llama13b_dpo_loss0_OTL/result_2023-12-07 02:09:10.json new file mode 100644 index 0000000000000000000000000000000000000000..ad2066757d0f9d52e4b164aeb314150598ec9303 --- /dev/null +++ b/Minirecord/llama13b_dpo_loss0_OTL/result_2023-12-07 02:09:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4274744027303754, + "acc_stderr": 0.014456862944650652, + "acc_norm": 0.47525597269624575, + "acc_norm_stderr": 0.01459348769493774 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43527185819557856, + "acc_stderr": 0.004947793051042669, + "acc_norm": 0.5808603863772157, + "acc_norm_stderr": 0.004924098711864585 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977239, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977239 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5351213282247765, + "acc_stderr": 0.017835798806290645, + "acc_norm": 0.5351213282247765, + "acc_norm_stderr": 0.017835798806290645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5466237942122186, + "acc_stderr": 0.028274359854894245, + "acc_norm": 0.5466237942122186, + "acc_norm_stderr": 0.028274359854894245 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.02523038123893483, 
+ "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.02523038123893483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883232, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883232 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823017, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823017 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 
0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.021233365030319563, + "acc_norm": 0.5688073394495413, + "acc_norm_stderr": 0.021233365030319563 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604675, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604675 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290317, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.027870745278290317 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.019524316744866342, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.019524316744866342 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608044, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125478, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125478 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.0122386157503165, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.0122386157503165 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711276, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711276 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087295, + "mc2": 0.43857065442012383, + "mc2_stderr": 0.015188413191278269 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5525383707201889, + "acc_stderr": 0.01709519030150058, + "acc_norm": 0.6269185360094451, + "acc_norm_stderr": 0.01662731827513745 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/llama13b_dpo_loss0_OTL", + "model_sha": "d0fe605aa90c5f43e188cdfa7e07456f4ad3eef9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/llama13b_test02/result_2023-12-01 09:43:54.json b/Minirecord/llama13b_test02/result_2023-12-01 09:43:54.json new file mode 100644 index 0000000000000000000000000000000000000000..568e737dd8803c273658bfc3c4aa9bd6fded367a --- /dev/null +++ b/Minirecord/llama13b_test02/result_2023-12-01 09:43:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.0144418896274644, + "acc_norm": 0.4709897610921502, + "acc_norm_stderr": 0.014586776355294324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43507269468233417, + "acc_stderr": 0.0049475331587120995, + "acc_norm": 0.5792670782712607, + "acc_norm_stderr": 0.004926678108601345 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5300127713920817, + "acc_stderr": 0.017847723086649073, + "acc_norm": 0.5300127713920817, + "acc_norm_stderr": 0.017847723086649073 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.02521731518484648, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.02521731518484648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.031342504862454025, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.031342504862454025 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.023330654054535892, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.023330654054535892 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.040894654493255835, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.040894654493255835 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 
0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144809, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144809 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5651376146788991, + "acc_stderr": 0.021254631465609273, + "acc_norm": 0.5651376146788991, + "acc_norm_stderr": 0.021254631465609273 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02782610930728369, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02782610930728369 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.01957695312208885, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.01957695312208885 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464622, + 
"acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464622 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36114732724902215, + "acc_stderr": 0.01226793547751903, + "acc_norm": 0.36114732724902215, + "acc_norm_stderr": 0.01226793547751903 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.01546102762725359, + "mc2": 0.43217399968615017, + "mc2_stderr": 0.015131631313211043 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5442739079102715, + "acc_stderr": 0.01712282914329265, + "acc_norm": 0.6245572609208973, + "acc_norm_stderr": 0.016648411589511088 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/llama13b_test02", + "model_sha": "97748555478dff9ff9c3e05e0b72e02f7e04bdd0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/minyi_5k_6B/result_2023-12-27 06:48:32.json b/Minirecord/minyi_5k_6B/result_2023-12-27 06:48:32.json new file mode 100644 index 0000000000000000000000000000000000000000..34d2d56c4d0c91bc4636534d159846961410b05b --- /dev/null +++ b/Minirecord/minyi_5k_6B/result_2023-12-27 06:48:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.01411129875167495, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256527 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40201155148376816, + "acc_stderr": 0.004893022130229093, + "acc_norm": 0.536247759410476, + "acc_norm_stderr": 0.004976651989757641 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.017784034534992436, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.017784034534992436 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.033408675019233246, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.033408675019233246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 
0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182087, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182087 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986476, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986476 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173092, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173092 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114982, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114982 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149135, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149135 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 
0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.02684298551961537, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.02684298551961537 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.020748959408988327, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.020748959408988327 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.028408302020332694, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.028408302020332694 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.01967580813528152, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.01967580813528152 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30837988826815643, + "acc_stderr": 0.015445716910998884, + "acc_norm": 0.30837988826815643, + "acc_norm_stderr": 0.015445716910998884 + }, + "harness|ko_mmlu_college_computer_science|5": { + 
"acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001663, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001663 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.012117939998705878, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.012117939998705878 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777305, + "mc2": 0.42634355103551425, + "mc2_stderr": 0.0150819870012603 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5112160566706021, + "acc_stderr": 0.017186028469489287, + "acc_norm": 0.5820543093270366, + "acc_norm_stderr": 0.016957292005279703 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, 
+ "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/minyi_5k_6B", + "model_sha": "966f5dd17f723d62b8d5ab7387461eae5a048fc6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/minyi_6b/result_2023-12-07 10:34:18.json b/Minirecord/minyi_6b/result_2023-12-07 10:34:18.json new file mode 100644 index 0000000000000000000000000000000000000000..ecbeedab897df9561d7a8cf98cb0caa5c6a8c1e7 --- /dev/null +++ b/Minirecord/minyi_6b/result_2023-12-07 10:34:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3651877133105802, + "acc_stderr": 0.0140702655192688, + "acc_norm": 0.4206484641638225, + "acc_norm_stderr": 0.014426211252508397 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40509858593905596, + "acc_stderr": 0.004899078300184257, + "acc_norm": 0.5417247560246963, + "acc_norm_stderr": 0.004972377085916328 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.545338441890166, + "acc_stderr": 0.017806304585052602, + "acc_norm": 0.545338441890166, + "acc_norm_stderr": 0.017806304585052602 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5594855305466238, + "acc_stderr": 0.02819640057419743, + "acc_norm": 0.5594855305466238, + "acc_norm_stderr": 0.02819640057419743 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 
0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126174, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126174 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.02842920317672455, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.02842920317672455 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349483, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349483 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 
0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523864, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.020920058346111062, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.020920058346111062 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + 
"acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963757, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.015131608849963757 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380025, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380025 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950234, + "mc2": 0.4285492447923733, + "mc2_stderr": 0.015103565647608173 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.551357733175915, + "acc_stderr": 0.017099430514725778, + "acc_norm": 0.6162927981109799, + "acc_norm_stderr": 0.016718924637231826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, 
+ "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/minyi_6b", + "model_sha": "f8137f2ed10ff1496e75729ed15fad480073a7e4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/minyi_dpo_6b/result_2023-12-18 09:01:03.json b/Minirecord/minyi_dpo_6b/result_2023-12-18 09:01:03.json new file mode 100644 index 0000000000000000000000000000000000000000..212b69dfe1e57e8d7da62a6bba31fdd03e686d21 --- /dev/null +++ b/Minirecord/minyi_dpo_6b/result_2023-12-18 09:01:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3643344709897611, + "acc_stderr": 0.014063260279882417, + "acc_norm": 0.4283276450511945, + "acc_norm_stderr": 0.014460496367599019 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4122684724158534, + "acc_stderr": 0.004912370023913013, + "acc_norm": 0.5438159729137622, + "acc_norm_stderr": 0.004970585328297623 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5798212005108557, + "acc_stderr": 0.017650651363078022, + "acc_norm": 0.5798212005108557, + "acc_norm_stderr": 0.017650651363078022 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04316378599511324, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04316378599511324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643945, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643945 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.02523038123893483, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.02523038123893483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299604, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 
0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.03400598505599015, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.03400598505599015 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.02441923496681906, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.02441923496681906 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651283, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651283 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.026918645383239004 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336938, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336938 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6018348623853211, + "acc_stderr": 0.02098798942265426, + "acc_norm": 0.6018348623853211, + "acc_norm_stderr": 0.02098798942265426 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309172, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309172 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061173, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061173 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.0278079901413202, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.0278079901413202 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915185, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915185 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220504, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220504 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34028683181225555, + "acc_stderr": 0.0121012176102238, + "acc_norm": 0.34028683181225555, + "acc_norm_stderr": 0.0121012176102238 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.4432499193765067, + "mc2_stderr": 0.01527417237825953 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5548996458087367, + "acc_stderr": 0.017086417431005467, + "acc_norm": 0.6269185360094451, + "acc_norm_stderr": 0.01662731827513745 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/minyi_dpo_6b", + "model_sha": "61066958700e4fda47e6381d3cbc4b2736373868", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/psm_170k_llama_13b/result_2023-12-18 09:04:29.json b/Minirecord/psm_170k_llama_13b/result_2023-12-18 09:04:29.json new file mode 100644 index 0000000000000000000000000000000000000000..e55f3d7261e6f194b5fb268c5221de05199d4fa8 --- /dev/null +++ b/Minirecord/psm_170k_llama_13b/result_2023-12-18 09:04:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4138225255972696, + "acc_stderr": 0.014392730009221009, + "acc_norm": 0.46245733788395904, + "acc_norm_stderr": 0.01457014449507558 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43696474805815577, + "acc_stderr": 0.004949969363017663, + "acc_norm": 0.5916152160924119, + "acc_norm_stderr": 0.0049053043710908725 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.541507024265645, + "acc_stderr": 0.01781824860346558, + "acc_norm": 0.541507024265645, + "acc_norm_stderr": 0.01781824860346558 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 
0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562807, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562807 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.43, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969567, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813332, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5346820809248555, + "acc_stderr": 0.026854257928258886, + "acc_norm": 0.5346820809248555, + "acc_norm_stderr": 0.026854257928258886 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.042032772914677614, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.042032772914677614 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610805, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610805 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330366, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330366 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386844, + "mc2": 0.4779022257635535, + "mc2_stderr": 0.015265649173984836 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.01715407371668287, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.016747577991642785 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/psm_170k_llama_13b", + "model_sha": "0a20211ff6d1c3414f1afe566f98c3619c29b074", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/psm_llama13b/result_2023-12-11 00:31:15.json b/Minirecord/psm_llama13b/result_2023-12-11 00:31:15.json new file mode 100644 index 0000000000000000000000000000000000000000..6d64e707ddf9bb4df01caf927e140cdcd7f410fe --- /dev/null +++ b/Minirecord/psm_llama13b/result_2023-12-11 00:31:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41552901023890787, + "acc_stderr": 0.014401366641216377, + "acc_norm": 0.47440273037542663, + "acc_norm_stderr": 0.014592230885298964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4320852419836686, + "acc_stderr": 0.004943537242344414, + "acc_norm": 0.5786695877315275, + "acc_norm_stderr": 0.004927631806477556 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.03815827365913237, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.03815827365913237 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.017855434554041996, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.017855434554041996 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.03550702465131342, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.03550702465131342 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.02523038123893483, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.02523038123893483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 
0.028429203176724562, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.028429203176724562 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556552, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556552 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261117, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261117 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637793, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637793 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.021311335009708575, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.021311335009708575 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + 
"acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0301619119307671, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0301619119307671 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.03121956944530185, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.03121956944530185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.01204966898321494, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.01204966898321494 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589674, + "mc2": 0.46116641220870364, + "mc2_stderr": 0.015268627780335149 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5596221959858324, + "acc_stderr": 0.017067699774312977, + "acc_norm": 0.6292798110979929, + "acc_norm_stderr": 
0.01660580128921262 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/psm_llama13b", + "model_sha": "37f58c653ff3f239a4dee90249728c1529e442e0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MrBananaHuman/kogpt2_small/result_2023-10-31 22:00:17.json b/MrBananaHuman/kogpt2_small/result_2023-10-31 22:00:17.json new file mode 100644 index 0000000000000000000000000000000000000000..dbf46a7089e5d61f632e51d438b6477b8bc3084a --- /dev/null +++ b/MrBananaHuman/kogpt2_small/result_2023-10-31 22:00:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20136518771331058, + "acc_stderr": 0.011718927477444262, + "acc_norm": 0.23976109215017063, + 
"acc_norm_stderr": 0.012476304127453949 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2751443935471022, + "acc_stderr": 0.004456743108170736, + "acc_norm": 0.2885879306910974, + "acc_norm_stderr": 0.00452179857792214 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.01588988836256049, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.01588988836256049 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19574468085106383, + "acc_stderr": 0.025937853139977148, + "acc_norm": 0.19574468085106383, + "acc_norm_stderr": 0.025937853139977148 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.029376616484945633, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.029376616484945633 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.02835962087053395, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.02835962087053395 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026928, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026928 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + 
"acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042767, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042767 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.02560423347089911, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.02560423347089911 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959323, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959323 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834839, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834839 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615625, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615625 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22839506172839505, + "acc_stderr": 0.023358211840626267, + "acc_norm": 0.22839506172839505, + "acc_norm_stderr": 0.023358211840626267 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147602, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147602 + }, + "harness|ko_mmlu_econometrics|5": { + 
"acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.01812566918086149, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.01812566918086149 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.21487603305785125, + "acc_stderr": 0.03749492448709695, + "acc_norm": 0.21487603305785125, + "acc_norm_stderr": 0.03749492448709695 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537537, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2198581560283688, + "acc_stderr": 0.024706141070705477, + "acc_norm": 0.2198581560283688, + "acc_norm_stderr": 0.024706141070705477 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.02752963744017493, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.02752963744017493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23533246414602346, + "acc_stderr": 0.010834432543912231, + "acc_norm": 0.23533246414602346, + "acc_norm_stderr": 0.010834432543912231 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253602, + "mc2": 0.4869463974456989, + "mc2_stderr": 0.015810411656532714 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3412042502951594, + "acc_stderr": 0.016300368742137313, + "acc_norm": 0.4982290436835891, + "acc_norm_stderr": 0.01719024627623187 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MrBananaHuman/kogpt2_small", + "model_sha": "3fb81f245efeec7837bc92af2b9ffaa5d1157a48", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No 
newline at end of file diff --git a/MrBananaHuman/polyglot-ko-5.8b/result_2023-11-06 09:18:33.json b/MrBananaHuman/polyglot-ko-5.8b/result_2023-11-06 09:18:33.json new file mode 100644 index 0000000000000000000000000000000000000000..7291f3ddda9eb20289b3dd196ef37ba8dea5a407 --- /dev/null +++ b/MrBananaHuman/polyglot-ko-5.8b/result_2023-11-06 09:18:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2986348122866894, + "acc_stderr": 0.013374078615068754, + "acc_norm": 0.34982935153583616, + "acc_norm_stderr": 0.013936809212158296 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39294961163114916, + "acc_stderr": 0.004874076250521577, + "acc_norm": 0.4985062736506672, + "acc_norm_stderr": 0.004989759144812286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0330140594698725, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0330140594698725 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26053639846743293, + "acc_stderr": 0.015696008563807096, + "acc_norm": 0.26053639846743293, + "acc_norm_stderr": 0.015696008563807096 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292326, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292326 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.03329394119073529, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.03329394119073529 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.21864951768488747, + "acc_stderr": 0.02347558141786111, + "acc_norm": 0.21864951768488747, + "acc_norm_stderr": 0.02347558141786111 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2556053811659193, + "acc_stderr": 0.029275891003969927, + "acc_norm": 0.2556053811659193, + "acc_norm_stderr": 0.029275891003969927 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.1984732824427481, + "acc_stderr": 0.0349814938546247, + "acc_norm": 0.1984732824427481, + "acc_norm_stderr": 0.0349814938546247 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.0307463007421245, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.0307463007421245 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135303, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135303 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.02835962087053395, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.02835962087053395 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20512820512820512, + "acc_stderr": 0.020473233173551986, + "acc_norm": 0.20512820512820512, + "acc_norm_stderr": 0.020473233173551986 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.029678333141444455, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.029678333141444455 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23548387096774193, + "acc_stderr": 0.02413763242933771, + "acc_norm": 0.23548387096774193, + "acc_norm_stderr": 0.02413763242933771 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.02795182680892433, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.02795182680892433 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3283018867924528, + "acc_stderr": 0.02890159361241178, + "acc_norm": 0.3283018867924528, + "acc_norm_stderr": 0.02890159361241178 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.02549753263960954, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.02549753263960954 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014652, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014652 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047875, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047875 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.02193587808118476, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.02193587808118476 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.023083658586984204, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.023083658586984204 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2085889570552147, + "acc_stderr": 0.03192193448934725, + "acc_norm": 0.2085889570552147, + "acc_norm_stderr": 0.03192193448934725 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 
0.02357688174400572, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400572 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.030031147977641545, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.030031147977641545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.017923087667803053, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.017923087667803053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604672, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604672 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.023152722439402303, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.023152722439402303 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.23140495867768596, + "acc_stderr": 0.03849856098794088, + "acc_norm": 0.23140495867768596, + "acc_norm_stderr": 0.03849856098794088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.01774089950917779, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.01774089950917779 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290392, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290392 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882603, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882603 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2977941176470588, + "acc_stderr": 0.02777829870154544, + "acc_norm": 0.2977941176470588, + "acc_norm_stderr": 0.02777829870154544 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073142, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073142 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + 
"acc_stderr": 0.029178682304842538, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842538 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24315514993481094, + "acc_stderr": 0.010956556654417355, + "acc_norm": 0.24315514993481094, + "acc_norm_stderr": 0.010956556654417355 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548299, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548299 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522512, + "mc2": 0.4146123621762204, + "mc2_stderr": 0.015628722266635826 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27390791027154665, + "acc_stderr": 0.01533249947479102, + "acc_norm": 0.29988193624557263, + "acc_norm_stderr": 0.01575344761542946 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MrBananaHuman/polyglot-ko-5.8b", + "model_sha": "05f8800a617b483b1c502d8c965ff434e63d387c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Nara-Lab/nallm-bart/result_2023-09-27 06:09:37.json b/Nara-Lab/nallm-bart/result_2023-09-27 06:09:37.json new file mode 100644 index 0000000000000000000000000000000000000000..19b0755feab761f48a37dee61246012bb98ce9d3 --- /dev/null +++ b/Nara-Lab/nallm-bart/result_2023-09-27 06:09:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20733788395904437, + "acc_stderr": 0.011846905782971363, + "acc_norm": 0.2593856655290102, + "acc_norm_stderr": 0.012808273573927088 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25124477195777734, + "acc_stderr": 0.00432842570099869, + "acc_norm": 0.2526389165504879, + "acc_norm_stderr": 0.004336375492801796 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140474, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.026160584450140474 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11659192825112108, + "acc_stderr": 0.021539639816244467, + "acc_norm": 0.11659192825112108, + "acc_norm_stderr": 0.021539639816244467 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124505, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124505 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 
0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.02851025151234192, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.02851025151234192 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.024121125416941183, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.024121125416941183 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.02528839450289136, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.02528839450289136 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + 
"acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.0222896388526179, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.0222896388526179 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.02525117393649501, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.02525117393649501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.344954128440367, + "acc_stderr": 0.02038060540506697, + "acc_norm": 0.344954128440367, + "acc_norm_stderr": 0.02038060540506697 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1487603305785124, + "acc_stderr": 0.03248470083807195, + "acc_norm": 0.1487603305785124, + "acc_norm_stderr": 0.03248470083807195 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.017952449196987866, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.017952449196987866 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349843007, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349843007 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 
0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.026537045312145277, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.026537045312145277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.28270042194092826, + "acc_stderr": 0.02931281415395592, + "acc_norm": 0.28270042194092826, + "acc_norm_stderr": 0.02931281415395592 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.011005971399927246, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.011005971399927246 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548299, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548299 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707684, + "mc2": 0.5034077613881154, + "mc2_stderr": 0.016935414149113398 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.12750885478158205, + "acc_stderr": 0.011467414350410923, + "acc_norm": 0.42384887839433294, + "acc_norm_stderr": 0.01698981083462824 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Nara-Lab/nallm-bart", + "model_sha": "a3a334adbae67f890b4186b5ce5aa4c5d7fbceaf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Nara-Lab/nallm-polyglot-ko-1.3b-base/result_2023-09-27 06:09:53.json b/Nara-Lab/nallm-polyglot-ko-1.3b-base/result_2023-09-27 06:09:53.json new file mode 100644 index 0000000000000000000000000000000000000000..146feda9a29fe40c313e7cdd3f4b64ceeb72fa04 --- /dev/null +++ b/Nara-Lab/nallm-polyglot-ko-1.3b-base/result_2023-09-27 06:09:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25170648464163825, + "acc_stderr": 0.01268249633404297, + "acc_norm": 0.3054607508532423, + "acc_norm_stderr": 0.013460080478002496 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3325034853614818, + "acc_stderr": 0.004701474865207019, + "acc_norm": 0.4032065325632344, + "acc_norm_stderr": 0.004895390341445625 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.22860791826309068, + "acc_stderr": 0.015016884698539873, + "acc_norm": 0.22860791826309068, + "acc_norm_stderr": 0.015016884698539873 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2170212765957447, + "acc_stderr": 0.026947483121496238, + "acc_norm": 0.2170212765957447, + "acc_norm_stderr": 0.026947483121496238 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.025403832978179622, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.025403832978179622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168264, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462203, + "acc_norm": 0.29770992366412213, + 
"acc_norm_stderr": 0.04010358942462203 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365914, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365914 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149354, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149354 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868963, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868963 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.021444547301560476, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.021444547301560476 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617722, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617722 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594528, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594528 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891162, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891162 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.02648035717989569, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.02648035717989569 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230165, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230165 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.29850746268656714, + "acc_stderr": 0.032357437893550424, + "acc_norm": 0.29850746268656714, + "acc_norm_stderr": 0.032357437893550424 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 
0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.024659685185967277, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.024659685185967277 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26055045871559634, + "acc_stderr": 0.01881918203485007, + "acc_norm": 0.26055045871559634, + "acc_norm_stderr": 0.01881918203485007 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.024404394928087873, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.024404394928087873 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.03896878985070417, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.03896878985070417 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.20394736842105263, + "acc_stderr": 0.03279000406310049, + "acc_norm": 0.20394736842105263, + "acc_norm_stderr": 0.03279000406310049 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.017740899509177795, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.017740899509177795 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290396, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176849, + "acc_norm": 0.3194444444444444, + 
"acc_norm_stderr": 0.03179876342176849 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210756, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210756 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.13, + "acc_stderr": 0.03379976689896309, + "acc_norm": 0.13, + "acc_norm_stderr": 0.03379976689896309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003203, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003203 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.02737294220178817, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.02737294220178817 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23663624511082137, + "acc_stderr": 0.010855137351572747, + "acc_norm": 0.23663624511082137, + "acc_norm_stderr": 0.010855137351572747 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02933116229425173, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02933116229425173 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548297, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548297 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862685, + "mc2": 0.4101668259727761, + "mc2_stderr": 0.01554453474117709 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26210153482880755, + "acc_stderr": 0.015119864670254154, + "acc_norm": 0.29043683589138136, + "acc_norm_stderr": 0.015607602569814628 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Nara-Lab/nallm-polyglot-ko-1.3b-base", + "model_sha": "8fd7fa9b1b5bbe857f65576e2e37bd600e10ce8c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Nara-Lab/nallm-polyglot-ko-3.8b-base/result_2023-09-27 06:10:01.json b/Nara-Lab/nallm-polyglot-ko-3.8b-base/result_2023-09-27 06:10:01.json new file mode 100644 index 0000000000000000000000000000000000000000..402bee4be306875594dce70d42ee7d6f35b3e515 --- /dev/null +++ b/Nara-Lab/nallm-polyglot-ko-3.8b-base/result_2023-09-27 06:10:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26109215017064846, + "acc_stderr": 0.012835523909473864, + "acc_norm": 0.32337883959044367, + "acc_norm_stderr": 0.013669421630012123 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3641704839673372, + "acc_stderr": 0.004802133511654235, + "acc_norm": 0.45727942640908187, + "acc_norm_stderr": 0.004971534874389935 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.031581495393387345, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.031581495393387345 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690876, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690876 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2388250319284802, + "acc_stderr": 0.015246803197398687, + "acc_norm": 0.2388250319284802, + "acc_norm_stderr": 0.015246803197398687 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.028504856470514196, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.028504856470514196 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288086, + "acc_norm": 
0.3313253012048193, + "acc_norm_stderr": 0.03664314777288086 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.29260450160771706, + "acc_stderr": 0.025839898334877983, + "acc_norm": 0.29260450160771706, + "acc_norm_stderr": 0.025839898334877983 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21973094170403587, + "acc_stderr": 0.0277901770643836, + "acc_norm": 0.21973094170403587, + "acc_norm_stderr": 0.0277901770643836 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.20610687022900764, + "acc_stderr": 0.03547771004159463, + "acc_norm": 0.20610687022900764, + "acc_norm_stderr": 0.03547771004159463 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.029857515673386414, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.029857515673386414 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882367, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882367 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24102564102564103, + "acc_stderr": 0.021685546665333184, + "acc_norm": 0.24102564102564103, + "acc_norm_stderr": 0.021685546665333184 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678245, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678245 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022884, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022884 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.27350427350427353, + "acc_stderr": 0.029202540153431194, + "acc_norm": 0.27350427350427353, + "acc_norm_stderr": 0.029202540153431194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.0270087660907081, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.0270087660907081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.040693063197213775, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.040693063197213775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987054, + "acc_norm": 
0.23178807947019867, + "acc_norm_stderr": 0.03445406271987054 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.029705284056772436, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772436 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.023445826276545546, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.023445826276545546 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.0220213661002202, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.0220213661002202 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700293, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700293 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.01792308766780305, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.01792308766780305 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928724, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928724 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.023929155517351284, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.023929155517351284 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.19834710743801653, + "acc_stderr": 0.03640118271990945, + "acc_norm": 0.19834710743801653, + "acc_norm_stderr": 0.03640118271990945 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312337, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312337 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.01716058723504635, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 
0.01716058723504635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.20212765957446807, + "acc_stderr": 0.02395666823785024, + "acc_norm": 0.20212765957446807, + "acc_norm_stderr": 0.02395666823785024 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22569832402234638, + "acc_stderr": 0.013981395058455059, + "acc_norm": 0.22569832402234638, + "acc_norm_stderr": 0.013981395058455059 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.25735294117647056, + "acc_stderr": 0.026556519470041513, + "acc_norm": 0.25735294117647056, + "acc_norm_stderr": 0.026556519470041513 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073167, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842534, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842534 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178475, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570338, + "mc2": 0.4144742012895836, + "mc2_stderr": 0.015299571868403075 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27508854781582054, + "acc_stderr": 0.01535301075795265, + "acc_norm": 0.3105076741440378, + "acc_norm_stderr": 0.015908004528762017 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Nara-Lab/nallm-polyglot-ko-3.8b-base", + "model_sha": "8d20c1e3d77f2a9a58046b58fb229c809476d350", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/NousResearch/Nous-Capybara-7B/result_2023-09-29 13:26:34.json b/NousResearch/Nous-Capybara-7B/result_2023-09-29 13:26:34.json new file mode 100644 index 0000000000000000000000000000000000000000..4ece585f21b49bd1fb5c7220c504d79850cdf4a1 --- /dev/null +++ b/NousResearch/Nous-Capybara-7B/result_2023-09-29 13:26:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2790102389078498, + "acc_stderr": 0.013106784883601348, + "acc_norm": 0.318259385665529, + "acc_norm_stderr": 0.013611993916971453 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3414658434574786, + "acc_stderr": 0.0047323221721537485, + "acc_norm": 0.41884086835291773, + "acc_norm_stderr": 0.004923609207861538 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3933588761174968, + "acc_stderr": 0.017468556724503162, + "acc_norm": 0.3933588761174968, + "acc_norm_stderr": 0.017468556724503162 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + 
"acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357797, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357797 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3408360128617363, + "acc_stderr": 0.026920841260776162, + "acc_norm": 0.3408360128617363, + "acc_norm_stderr": 0.026920841260776162 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.03446897738659333, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.03446897738659333 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.03941707632064889, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.03941707632064889 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121633, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121633 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3871794871794872, + "acc_stderr": 0.024697216930878944, + "acc_norm": 0.3871794871794872, + "acc_norm_stderr": 0.024697216930878944 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.02727389059430064, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.02727389059430064 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03255326307272486, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03255326307272486 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33584905660377357, + "acc_stderr": 0.029067220146644826, + "acc_norm": 0.33584905660377357, + "acc_norm_stderr": 0.029067220146644826 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230193, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230193 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473834, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473834 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.035344398485395806, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.035344398485395806 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36127167630057805, + "acc_stderr": 0.02586220185227789, + "acc_norm": 0.36127167630057805, + "acc_norm_stderr": 0.02586220185227789 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.027163686038271215, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.027163686038271215 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3688073394495413, + "acc_stderr": 0.020686227560729548, + "acc_norm": 0.3688073394495413, + "acc_norm_stderr": 0.020686227560729548 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + 
"acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.01895088677080631, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.01895088677080631 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560538, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560538 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003203, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003203 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3673469387755102, + "acc_stderr": 0.030862144921087558, + "acc_norm": 0.3673469387755102, + "acc_norm_stderr": 0.030862144921087558 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.43037974683544306, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.43037974683544306, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771316, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.03393388584958406, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.03393388584958406 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.038517163193983954, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.038517163193983954 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.4712238434154724, + "mc2_stderr": 0.016160223034293618 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24557260920897284, + "acc_stderr": 0.014798357154972828, + "acc_norm": 0.27390791027154665, + "acc_norm_stderr": 0.01533249947479102 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, 
+ "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "NousResearch/Nous-Capybara-7B", + "model_sha": "f57199fd0189f2472ee567279e9fb292beeee343", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/NousResearch/Nous-Hermes-llama-2-7b/result_2023-09-29 13:26:53.json b/NousResearch/Nous-Hermes-llama-2-7b/result_2023-09-29 13:26:53.json new file mode 100644 index 0000000000000000000000000000000000000000..ca6d1b3acd3781c68af6c2d0046a17eaf3a48c44 --- /dev/null +++ b/NousResearch/Nous-Hermes-llama-2-7b/result_2023-09-29 13:26:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.013057169655761838, + "acc_norm": 0.3191126279863481, + "acc_norm_stderr": 0.013621696119173304 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33718382792272455, + "acc_stderr": 0.004717820714968746, + "acc_norm": 0.416849233220474, + "acc_norm_stderr": 0.004920298437884909 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39208173690932313, + "acc_stderr": 0.017458524050147643, + "acc_norm": 0.39208173690932313, + "acc_norm_stderr": 0.017458524050147643 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680589, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680589 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.02698147804364803, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.02698147804364803 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.03170882426845501, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.03170882426845501 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.32323232323232326, + "acc_stderr": 0.03332299921070645, + "acc_norm": 0.32323232323232326, + "acc_norm_stderr": 0.03332299921070645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307808, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307808 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3277310924369748, + "acc_stderr": 0.030489911417673227, + "acc_norm": 0.3277310924369748, + "acc_norm_stderr": 0.030489911417673227 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.02345467488940429, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.02345467488940429 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3419354838709677, + 
"acc_stderr": 0.02698528957655274, + "acc_norm": 0.3419354838709677, + "acc_norm_stderr": 0.02698528957655274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.49572649572649574, + "acc_stderr": 0.032754892643821316, + "acc_norm": 0.49572649572649574, + "acc_norm_stderr": 0.032754892643821316 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32075471698113206, + "acc_stderr": 0.028727502957880263, + "acc_norm": 0.32075471698113206, + "acc_norm_stderr": 0.028727502957880263 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910507, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008937, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008937 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.472636815920398, + "acc_stderr": 0.03530235517334682, + "acc_norm": 0.472636815920398, + "acc_norm_stderr": 0.03530235517334682 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.034765996075164785, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.034765996075164785 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3554913294797688, + "acc_stderr": 0.025770292082977247, + "acc_norm": 0.3554913294797688, + "acc_norm_stderr": 0.025770292082977247 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02686949074481525, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02686949074481525 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.034998072761933376, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.034998072761933376 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3376146788990826, + "acc_stderr": 0.020275265986638903, + "acc_norm": 0.3376146788990826, + "acc_norm_stderr": 0.020275265986638903 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + 
"acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.027475969910660956, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.027475969910660956 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03583496176361061, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03583496176361061 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.018690850273595284, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.018690850273595284 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150381, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150381 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26727509778357234, + "acc_stderr": 0.011302607515637513, + "acc_norm": 0.26727509778357234, + "acc_norm_stderr": 0.011302607515637513 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.03256685484460388, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.03256685484460388 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.03756335775187897, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.03756335775187897 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.016238065069059605, + "mc2": 0.48487503732289583, + "mc2_stderr": 0.015806306421646696 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2644628099173554, + "acc_stderr": 0.015163499477892407, 
+ "acc_norm": 0.30342384887839435, + "acc_norm_stderr": 0.015806072717909566 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "NousResearch/Nous-Hermes-llama-2-7b", + "model_sha": "b7c3ec54b754175e006ef75696a2ba3802697078", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/NousResearch/Yarn-Mistral-7b-128k/result_2023-12-08 01:35:30.json b/NousResearch/Yarn-Mistral-7b-128k/result_2023-12-08 01:35:30.json new file mode 100644 index 0000000000000000000000000000000000000000..85d713e1fb68463ee09838647af150e13bb1ba77 --- /dev/null +++ b/NousResearch/Yarn-Mistral-7b-128k/result_2023-12-08 01:35:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.318259385665529, + 
"acc_stderr": 0.013611993916971453, + "acc_norm": 0.3583617747440273, + "acc_norm_stderr": 0.01401288333485986 + }, + "harness|ko_hellaswag|10": { + "acc": 0.359788886675961, + "acc_stderr": 0.004789575163418655, + "acc_norm": 0.4539932284405497, + "acc_norm_stderr": 0.0049686135393092485 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.017758800534214417, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.017758800534214417 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.02801365189199507, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.02801365189199507 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110656, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110656 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.02530295889085015, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.02530295889085015 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 
0.04560480215720684 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264714, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.030197611600197946, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.030197611600197946 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.03765746693865149, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.03765746693865149 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361816, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361816 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 
0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584926, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584926 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.021406952688151588, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.021406952688151588 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3545751633986928, + "acc_stderr": 0.0193533605475537, + "acc_norm": 0.3545751633986928, + "acc_norm_stderr": 0.0193533605475537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31843575418994413, + "acc_stderr": 0.015581008080360276, + "acc_norm": 0.31843575418994413, + "acc_norm_stderr": 0.015581008080360276 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280058, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280058 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3109517601043025, + "acc_stderr": 0.011822252917799207, + "acc_norm": 0.3109517601043025, + "acc_norm_stderr": 0.011822252917799207 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 
0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713606, + "mc2": 0.4818640958328875, + "mc2_stderr": 0.015614192429758816 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3919716646989374, + "acc_stderr": 0.016784332119424088, + "acc_norm": 0.4946871310507674, + "acc_norm_stderr": 0.01718938362722969 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "NousResearch/Yarn-Mistral-7b-128k", + "model_sha": "d09f1f8ed437d61c1aff94c1beabee554843dcdd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + 
"max_samples": null + } +} \ No newline at end of file diff --git a/NousResearch/Yarn-Mistral-7b-64k/result_2023-12-08 01:34:46.json b/NousResearch/Yarn-Mistral-7b-64k/result_2023-12-08 01:34:46.json new file mode 100644 index 0000000000000000000000000000000000000000..45326443718417c61cb236792c0749408938e507 --- /dev/null +++ b/NousResearch/Yarn-Mistral-7b-64k/result_2023-12-08 01:34:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.013659980894277373, + "acc_norm": 0.35921501706484643, + "acc_norm_stderr": 0.014020224155839155 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3604859589723163, + "acc_stderr": 0.004791601975612767, + "acc_norm": 0.4561840270862378, + "acc_norm_stderr": 0.0049705853282976204 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4393358876117497, + "acc_stderr": 0.017747874245683616, + "acc_norm": 0.4393358876117497, + "acc_norm_stderr": 0.017747874245683616 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236784, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236784 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330315, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330315 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.041379310344827586, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.041379310344827586 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993179, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993179 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 
+ }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.02531063925493391, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.02531063925493391 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.034524539038220385, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.034524539038220385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776296, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.04760548821460325, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.04760548821460325 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.03461199429040013, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.03461199429040013 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.026788811931562753, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.026788811931562753 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470867, + 
"acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470867 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47155963302752296, + "acc_stderr": 0.021402615697348044, + "acc_norm": 0.47155963302752296, + "acc_norm_stderr": 0.021402615697348044 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.028408302020332687, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.028408302020332687 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35130718954248363, + "acc_stderr": 0.01931267606578656, + "acc_norm": 0.35130718954248363, + "acc_norm_stderr": 0.01931267606578656 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31508379888268156, + "acc_stderr": 0.015536850852473638, + "acc_norm": 0.31508379888268156, + "acc_norm_stderr": 0.015536850852473638 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125474, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125474 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.031996152328062875, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.031996152328062875 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.03239360017397471, + "acc_norm": 0.5485232067510548, 
+ "acc_norm_stderr": 0.03239360017397471 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 0.011808598262503318, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361012, + "mc2": 0.4812011290103991, + "mc2_stderr": 0.015609821540397258 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4049586776859504, + "acc_stderr": 0.01687694116504561, + "acc_norm": 0.49940968122786306, + "acc_norm_stderr": 0.017190342123448665 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "NousResearch/Yarn-Mistral-7b-64k", + "model_sha": "0273c624561fcecc8e8f4030492a9307aa60f945", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OMK510/ko-llama2-toy/result_2023-10-18 09:10:50.json b/OMK510/ko-llama2-toy/result_2023-10-18 09:10:50.json new file mode 100644 index 0000000000000000000000000000000000000000..cbc9608c20b2c9495ac6790ff8a96526dcba00ce --- /dev/null +++ b/OMK510/ko-llama2-toy/result_2023-10-18 09:10:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2150170648464164, + "acc_stderr": 0.012005717634133611, + "acc_norm": 0.27474402730375425, + "acc_norm_stderr": 0.013044617212771227 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25403306114319857, + "acc_stderr": 0.00434426617963492, + "acc_norm": 0.2605058753236407, + "acc_norm_stderr": 0.004380136468543937 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245231, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245231 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27330779054916987, + "acc_stderr": 0.015936681062628556, + "acc_norm": 0.27330779054916987, + "acc_norm_stderr": 0.015936681062628556 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19574468085106383, + "acc_stderr": 0.025937853139977148, + "acc_norm": 0.19574468085106383, + "acc_norm_stderr": 0.025937853139977148 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.03070982405056527, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.03070982405056527 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788513, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788513 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.18834080717488788, + "acc_stderr": 0.026241132996407256, + "acc_norm": 0.18834080717488788, + "acc_norm_stderr": 0.026241132996407256 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.039417076320648906, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.039417076320648906 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176892, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176892 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.023454674889404295, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.023454674889404295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33225806451612905, + "acc_stderr": 0.026795560848122787, + "acc_norm": 0.33225806451612905, + "acc_norm_stderr": 0.026795560848122787 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.02634148037111834, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.02634148037111834 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21890547263681592, + "acc_stderr": 0.029239174636647, + "acc_norm": 0.21890547263681592, + "acc_norm_stderr": 0.029239174636647 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483098, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483098 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566016, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + 
"acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.02525117393649502, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.02525117393649502 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.03027690994517826, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.03027690994517826 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.01792308766780305, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.01792308766780305 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.02505850331695815, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.02505850331695815 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137283, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137283 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791013, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932267, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932267 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.44485294117647056, + "acc_stderr": 0.030187532060329387, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329387 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.01132873440314031, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.01132873440314031 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139406, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139406 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476196, + "mc2": 0.5244892940135847, + "mc2_stderr": 0.01678983173354145 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09799291617473435, + "acc_stderr": 0.010221558855214903, + "acc_norm": 0.30460448642266824, + "acc_norm_stderr": 0.01582336727312938 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OMK510/ko-llama2-toy", + "model_sha": "5ad4552d4e7990214eec24fcadf84eba584fba1f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OMK510/omk_mixed2/result_2023-10-20 04:05:12.json b/OMK510/omk_mixed2/result_2023-10-20 04:05:12.json new file mode 100644 index 0000000000000000000000000000000000000000..68f1bcab1c67a846e91a9670802b640022e7d666 --- /dev/null +++ b/OMK510/omk_mixed2/result_2023-10-20 04:05:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000324, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137998 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4086835291774547, + "acc_stderr": 0.004905859114942308, + "acc_norm": 0.5462059350726947, + "acc_norm_stderr": 0.0049684294763450345 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49808429118773945, + "acc_stderr": 0.017879832259026677, + "acc_norm": 0.49808429118773945, + "acc_norm_stderr": 0.017879832259026677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596239, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596239 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484068, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.02834504586484068 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 
0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.024359581465396955, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.024359581465396955 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978813, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978813 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.02834378725054064, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.02834378725054064 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5854700854700855, + "acc_stderr": 0.03227396567623779, + "acc_norm": 0.5854700854700855, + "acc_norm_stderr": 0.03227396567623779 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655795, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, 
+ "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470021, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470021 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.02807415894760066, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.01885008469646872, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.01885008469646872 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169945, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169945 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.03085199299325701, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.03085199299325701 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 
0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.025187786660227276, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.025187786660227276 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4978902953586498, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.4978902953586498, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29595827900912647, + "acc_stderr": 0.011658518525277039, + "acc_norm": 0.29595827900912647, + "acc_norm_stderr": 0.011658518525277039 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.0153218216884762, + "mc2": 0.4217472836360241, + "mc2_stderr": 0.014796357378387609 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727637, + "acc_norm": 0.5655253837072018, + "acc_norm_stderr": 0.017042098620824928 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OMK510/omk_mixed2", + "model_sha": "8fb8a29ecba1b69a023885fcf11ea223b491bbac", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Open-Orca/Mistral-7B-OpenOrca/result_2023-11-15 23:41:59.json b/Open-Orca/Mistral-7B-OpenOrca/result_2023-11-15 23:41:59.json new file mode 100644 index 0000000000000000000000000000000000000000..2cff1a3087dada9021faa1dbeec4b61ab0dde534 --- /dev/null +++ b/Open-Orca/Mistral-7B-OpenOrca/result_2023-11-15 23:41:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33361774744027306, + "acc_stderr": 0.013778687054176536, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37054371639115713, + "acc_stderr": 0.004819633668832544, + "acc_norm": 0.47480581557458673, + "acc_norm_stderr": 0.004983442888677775 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4521072796934866, + "acc_stderr": 0.01779775149386562, + "acc_norm": 0.4521072796934866, + "acc_norm_stderr": 0.01779775149386562 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085328, + "acc_norm": 0.44694533762057875, 
+ "acc_norm_stderr": 0.028237769422085328 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102308, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102308 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347354, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347354 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.03487558640462064, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.03487558640462064 + 
}, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.019659922493623343, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.019659922493623343 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + 
"acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22681564245810057, + "acc_stderr": 0.014005843570897887, + "acc_norm": 0.22681564245810057, + "acc_norm_stderr": 0.014005843570897887 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841195, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763128, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.031784718745647304, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.031784718745647304 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.011952840809646566, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.011952840809646566 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.01627228795791693, + "mc2": 0.4954694202499875, + "mc2_stderr": 0.01593113231156763 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.500590318772137, + "acc_norm_stderr": 0.017190342123448586 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Open-Orca/Mistral-7B-OpenOrca", + "model_sha": "8f934b2bd2d4484b846a7faf1c53093b9d956367", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Open-Orca/Mistral-7B-SlimOrca/result_2023-10-25 02:17:52.json b/Open-Orca/Mistral-7B-SlimOrca/result_2023-10-25 02:17:52.json new file mode 100644 index 0000000000000000000000000000000000000000..315640acf2385434ea38b4bb20e1c9075cb6af95 --- /dev/null +++ b/Open-Orca/Mistral-7B-SlimOrca/result_2023-10-25 02:17:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33532423208191126, + "acc_stderr": 0.013796182947785559, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668526 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3732324238199562, + "acc_stderr": 0.004826746160830184, + "acc_norm": 0.48048197570205137, + "acc_norm_stderr": 0.004985978214937919 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.017869330154003705, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.017869330154003705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.041716541613545426, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.041716541613545426 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 
0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.024939313906940777, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.024939313906940777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 
0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159788, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48256880733944957, + "acc_stderr": 0.02142429187185315, + "acc_norm": 0.48256880733944957, + "acc_norm_stderr": 0.02142429187185315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577447, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577447 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.1877094972067039, + "acc_stderr": 0.013059605303257046, + "acc_norm": 0.1877094972067039, + "acc_norm_stderr": 0.013059605303257046 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131775, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131775 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897639, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897639 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.03374499356319355, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.03374499356319355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.016185744355144895, + "mc2": 0.4995755882922268, + "mc2_stderr": 0.015845203460942626 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44037780401416765, + "acc_stderr": 0.017067699774312974, + "acc_norm": 0.45690672963400236, + "acc_norm_stderr": 0.017126389093086784 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, 
+ "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Open-Orca/Mistral-7B-SlimOrca", + "model_sha": "da461634dccd94d2da6a8de3b3cd84a527f60588", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OpenBuddy/openbuddy-llama2-13b-v8.1-fp16/result_2023-10-04 09:36:31.json b/OpenBuddy/openbuddy-llama2-13b-v8.1-fp16/result_2023-10-04 09:36:31.json new file mode 100644 index 0000000000000000000000000000000000000000..19a1887e9f75a0221b92ae751f38aada1a9de42a --- /dev/null +++ b/OpenBuddy/openbuddy-llama2-13b-v8.1-fp16/result_2023-10-04 09:36:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.23464163822525597, + "acc_stderr": 0.012383873560768673, + "acc_norm": 0.26023890784982934, + "acc_norm_stderr": 0.01282193022511256 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2562238597888867, + "acc_stderr": 0.0043565471858470406, + "acc_norm": 0.2517426807408883, + "acc_norm_stderr": 0.004331271717773835 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.280970625798212, + "acc_stderr": 0.01607312785122124, + "acc_norm": 
0.280970625798212, + "acc_norm_stderr": 0.01607312785122124 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621502, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621502 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.02964400657700962, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.02964400657700962 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944966, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944966 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.03076935200822915, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.03076935200822915 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.03618664819936245, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936245 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.02738140692786897, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.02738140692786897 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.0270087660907081, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.0270087660907081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.02564410863926762, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.02564410863926762 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.030965903123573026, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.030965903123573026 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594316, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845335, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.02425790170532338, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.02425790170532338 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615771, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023132376234543353, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023132376234543353 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333337, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333337 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879341016, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.024170840879341016 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.016992723465466236, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.016992723465466236 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983566, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983566 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25684485006518903, + "acc_stderr": 0.011158455853098844, + "acc_norm": 0.25684485006518903, + "acc_norm_stderr": 0.011158455853098844 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.031822318676475544, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.031822318676475544 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.01559475363200652, + "mc2": 0.4462776885774457, + "mc2_stderr": 0.01611369655251753 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07083825265643448, + "acc_stderr": 0.008820524274864483, + "acc_norm": 0.21841794569067297, + "acc_norm_stderr": 0.014205164490995575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OpenBuddy/openbuddy-llama2-13b-v8.1-fp16", + "model_sha": "982a6b50fe0fa7e1bc3816d6d28484038e9daf29", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OpenBuddy/openbuddy-llemma-34b-v13.2/result_2023-11-09 12:03:35.json b/OpenBuddy/openbuddy-llemma-34b-v13.2/result_2023-11-09 12:03:35.json new file mode 100644 index 0000000000000000000000000000000000000000..d7244e71f44c053310c450232bf46161a08a5995 --- /dev/null +++ b/OpenBuddy/openbuddy-llemma-34b-v13.2/result_2023-11-09 12:03:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3728668941979522, + "acc_stderr": 0.01413117676013117, + "acc_norm": 0.41467576791808874, + "acc_norm_stderr": 0.014397070564409174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36496713802031466, + "acc_stderr": 0.004804370563856228, + "acc_norm": 
0.45717984465245964, + "acc_norm_stderr": 0.004971449552787176 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.04721188506097172, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.04721188506097172 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4904214559386973, + "acc_stderr": 0.017876682275340887, + "acc_norm": 0.4904214559386973, + "acc_norm_stderr": 0.017876682275340887 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.03266204299064678, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.03266204299064678 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.025158266016868554, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.025158266016868554 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + 
"acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813332, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131143, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131143 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.02562085704293665, + "acc_norm": 0.4497354497354497, + "acc_norm_stderr": 0.02562085704293665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.5266055045871559, + "acc_stderr": 0.021406952688151574, + "acc_norm": 0.5266055045871559, + "acc_norm_stderr": 0.021406952688151574 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521664, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521664 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577447, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577447 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.033851779760448106, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.033851779760448106 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527824, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527824 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.03175195237583323, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.03175195237583323 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002574, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002574 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 
0.01590598704818483, + "mc2": 0.46212381405853503, + "mc2_stderr": 0.01568383395016852 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.01718832921965428, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.017145715365486664 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OpenBuddy/openbuddy-llemma-34b-v13.2", + "model_sha": "1abff297a8eae622c0f106854f2a8fbfcfa9c119", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OpenBuddy/openbuddy-zephyr-7b-v14.1/result_2023-11-15 05:52:01.json b/OpenBuddy/openbuddy-zephyr-7b-v14.1/result_2023-11-15 05:52:01.json new file mode 100644 index 
0000000000000000000000000000000000000000..2c431439b63009570b6d351722b6bec2c977fcb9 --- /dev/null +++ b/OpenBuddy/openbuddy-zephyr-7b-v14.1/result_2023-11-15 05:52:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20136518771331058, + "acc_stderr": 0.011718927477444263, + "acc_norm": 0.25, + "acc_norm_stderr": 0.012653835621466646 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2535351523600876, + "acc_stderr": 0.004341454841892329, + "acc_norm": 0.2508464449312886, + "acc_norm_stderr": 0.004326143430360104 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.033773102522091945, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.033773102522091945 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3397190293742018, + "acc_stderr": 0.016936394114301645, + "acc_norm": 0.3397190293742018, + "acc_norm_stderr": 0.016936394114301645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.02880998985410297, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.02880998985410297 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680589, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680589 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.35691318327974275, + "acc_stderr": 0.02721042037593403, + "acc_norm": 0.35691318327974275, + "acc_norm_stderr": 0.02721042037593403 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.29596412556053814, + "acc_stderr": 0.030636591348699796, + "acc_norm": 0.29596412556053814, + "acc_norm_stderr": 0.030636591348699796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.038061426873099935, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.038061426873099935 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.029344572500634342, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.029344572500634342 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.02428314052946729, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.02428314052946729 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.027709359675032495, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.027709359675032495 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4700854700854701, + "acc_stderr": 0.032697411068124425, + "acc_norm": 0.4700854700854701, + "acc_norm_stderr": 0.032697411068124425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30566037735849055, + "acc_stderr": 0.028353298073322666, + "acc_norm": 0.30566037735849055, + "acc_norm_stderr": 0.028353298073322666 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.034791855725996614, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.034791855725996614 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655812, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655812 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554858, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554858 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3901734104046243, + "acc_stderr": 0.026261677607806636, + "acc_norm": 0.3901734104046243, + "acc_norm_stderr": 0.026261677607806636 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625676, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + 
"acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.03458816042181006, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.03458816042181006 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30091743119266057, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.30091743119266057, + "acc_norm_stderr": 0.019664751366802114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.027684181883302884, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.027684181883302884 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4297520661157025, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537766, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537766 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578731, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578731 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021595, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021595 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.01444415780826145, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.01444415780826145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.379746835443038, + "acc_stderr": 0.03159188752965851, + "acc_norm": 0.379746835443038, + "acc_norm_stderr": 0.03159188752965851 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 
0.011808598262503318, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501954, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501954 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.01529807750948508, + "mc2": 0.4426420327385365, + "mc2_stderr": 0.016343415069680026 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08146399055489964, + "acc_stderr": 0.00940471744194626, + "acc_norm": 0.32585596221959856, + "acc_norm_stderr": 0.01611402389480034 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + 
"config_general": { + "model_name": "OpenBuddy/openbuddy-zephyr-7b-v14.1", + "model_sha": "208b6fb841239a36fb0ea675179a231e0ad9d287", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v1/result_2023-11-23 04:55:42.json b/PracticeLLM/Custom-KoLLM-13B-v1/result_2023-11-23 04:55:42.json new file mode 100644 index 0000000000000000000000000000000000000000..941e5461aef7888e8806bdd71b0ac36dd6976752 --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v1/result_2023-11-23 04:55:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40187713310580203, + "acc_stderr": 0.014327268614578274, + "acc_norm": 0.4598976109215017, + "acc_norm_stderr": 0.014564318856924848 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4257120095598486, + "acc_stderr": 0.0049343998025949045, + "acc_norm": 0.569308902609042, + "acc_norm_stderr": 0.004941609820763589 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 0.01784491809046855, + "acc_norm": 0.5312899106002554, + "acc_norm_stderr": 0.01784491809046855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + 
"acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.024915243985987844, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.024915243985987844 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.0281291127091659, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.0281291127091659 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895992, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.02646611753895992 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.02271746789770861, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.02271746789770861 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": 
{ + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5467889908256881, + "acc_stderr": 0.021343255165546044, + "acc_norm": 0.5467889908256881, + "acc_norm_stderr": 0.021343255165546044 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402544, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402544 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714857, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714857 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.42857142857142855, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32333767926988266, + "acc_stderr": 0.011946565758447197, + "acc_norm": 0.32333767926988266, + "acc_norm_stderr": 0.011946565758447197 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": 0.4166001481815424, + "mc2_stderr": 0.014823792417118063 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5619834710743802, + "acc_stderr": 0.01705775370216029, + "acc_norm": 0.6458087367178277, + "acc_norm_stderr": 0.01644317574921476 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 
1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v1", + "model_sha": "f2647ab150f35533ea3197cf7a6a18d21422df44", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v2/result_2023-11-25 07:15:53.json b/PracticeLLM/Custom-KoLLM-13B-v2/result_2023-11-25 07:15:53.json new file mode 100644 index 0000000000000000000000000000000000000000..5623fd28fb8d89dd7197385e61a2a7b2b1f2cbbe --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v2/result_2023-11-25 07:15:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910471, + "acc_norm": 0.45733788395904434, + "acc_norm_stderr": 0.01455810654392406 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4255128460466043, + "acc_stderr": 0.004934100774481221, + "acc_norm": 0.5697072296355308, + "acc_norm_stderr": 0.004941051795214796 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.524904214559387, + "acc_stderr": 0.01785777070490103, + "acc_norm": 0.524904214559387, + "acc_norm_stderr": 0.01785777070490103 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.043171711948702535, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.043171711948702535 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + 
"acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378947, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378947 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03196876989195778, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03196876989195778 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38974358974358975, + "acc_stderr": 0.024726967886647074, + "acc_norm": 0.38974358974358975, + "acc_norm_stderr": 0.024726967886647074 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.0336612448905145, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.0336612448905145 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3903225806451613, + "acc_stderr": 0.027751256636969576, + "acc_norm": 0.3903225806451613, + "acc_norm_stderr": 0.027751256636969576 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.031804252043840985, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.031804252043840985 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712156, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712156 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.47761194029850745, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.47761194029850745, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 
0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.026483392042098174, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.026483392042098174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.027339546640662737, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.027339546640662737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 0.021436420955529414, + "acc_norm": 0.4954128440366973, + "acc_norm_stderr": 0.021436420955529414 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848879, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848879 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.027732834353363944, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.027732834353363944 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3545751633986928, + "acc_stderr": 0.01935336054755371, + "acc_norm": 0.3545751633986928, + "acc_norm_stderr": 0.01935336054755371 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372937, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 
0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988644, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988644 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29335071707953064, + "acc_stderr": 0.011628520449582075, + "acc_norm": 0.29335071707953064, + "acc_norm_stderr": 0.011628520449582075 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.034602283272391704, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.034602283272391704 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.039042723414318574, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.039042723414318574 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2460220318237454, + "mc1_stderr": 0.015077219200662564, + "mc2": 0.38751888371590926, + "mc2_stderr": 0.014562542001309185 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077307, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.016756921571069422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v2", + "model_sha": "c309f5b24994489e7d44ef766605e5559e730a22", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v3/result_2023-11-26 07:38:53.json b/PracticeLLM/Custom-KoLLM-13B-v3/result_2023-11-26 07:38:53.json new file mode 100644 index 0000000000000000000000000000000000000000..61e89bc5920ccdd3c632624a8964102f626041e9 --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v3/result_2023-11-26 07:38:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3848122866894198, + "acc_stderr": 0.014218371065251104, + "acc_norm": 0.447098976109215, + "acc_norm_stderr": 0.014529380160526842 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4239195379406493, + "acc_stderr": 0.004931679059919374, + "acc_norm": 0.5689105755825533, + "acc_norm_stderr": 0.004942164585991475 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5300127713920817, + "acc_stderr": 0.01784772308664908, + "acc_norm": 0.5300127713920817, + "acc_norm_stderr": 0.01784772308664908 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 
0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461224, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461224 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 
0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.02357760479165582, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.02357760479165582 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4421965317919075, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.4421965317919075, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.027563010971606676, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.027563010971606676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848876, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848876 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.044492703500683836, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.044492703500683836 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.039105257528497236, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.039105257528497236 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355445, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355445 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.0278079901413202, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.0278079901413202 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 
0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.02792096314799366, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.02792096314799366 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411962, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411962 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789848, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789848 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29139504563233376, + "acc_stderr": 0.011605720214257617, + "acc_norm": 0.29139504563233376, + "acc_norm_stderr": 0.011605720214257617 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775527, + "mc2": 0.44221166184758365, + "mc2_stderr": 0.014852170962038991 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4025974025974026, + "acc_stderr": 0.01686102048640777, + "acc_norm": 0.45336481700118064, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v3", + "model_sha": "65f37e4173e111f31c7094387a9de5627f9d3536", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v4/result_2023-11-27 15:16:50.json b/PracticeLLM/Custom-KoLLM-13B-v4/result_2023-11-27 15:16:50.json new file mode 100644 index 0000000000000000000000000000000000000000..f51874a4250f51350f5679c754e397fe7b4cd7ab --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v4/result_2023-11-27 15:16:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3924914675767918, + "acc_stderr": 0.014269634635670722, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4253136825333599, + "acc_stderr": 0.004933800927560533, + "acc_norm": 0.5706034654451304, + "acc_norm_stderr": 0.004939784311448985 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.01779775149386563, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.01779775149386563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 
+ }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419034, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419034 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.033661244890514495, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.033661244890514495 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849738, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849738 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276612, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276612 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5761467889908257, + "acc_stderr": 0.02118726320908751, + "acc_norm": 0.5761467889908257, + "acc_norm_stderr": 0.02118726320908751 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387296, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387296 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + 
"acc_stderr": 0.019393058402355442, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289784, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289784 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483927, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483927 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.031557828165561644, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.031557828165561644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048228, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048228 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.01565960575532691, + "mc2": 0.42927679239357447, + "mc2_stderr": 0.014965776733274934 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.01715407371668287, + "acc_norm": 0.6257378984651711, + "acc_norm_stderr": 0.016637917789798746 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v4", + "model_sha": "64859181e99108e5033e34ea2a5162400bb1a803", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v5/result_2023-11-28 18:53:32.json b/PracticeLLM/Custom-KoLLM-13B-v5/result_2023-11-28 18:53:32.json new file mode 100644 index 0000000000000000000000000000000000000000..f554eb385be264cb3e51f16f0c70dea28d631ad0 --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v5/result_2023-11-28 18:53:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3779863481228669, + "acc_stderr": 0.014169664520303094, + "acc_norm": 0.44880546075085326, + "acc_norm_stderr": 0.014534599585097669 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4272057359091814, + "acc_stderr": 0.00493661642892264, + "acc_norm": 0.5674168492332204, + "acc_norm_stderr": 0.004944215937021391 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.541507024265645, + "acc_stderr": 0.017818248603465578, + "acc_norm": 0.541507024265645, + "acc_norm_stderr": 0.017818248603465578 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123005, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123005 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177476, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177476 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 
0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073824, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073824 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.02357760479165581, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.02357760479165581 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.02680372058320617, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.02680372058320617 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.02835895631342355, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.02835895631342355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 
0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.019373332420724507, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.019373332420724507 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536023, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536023 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935893, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935893 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823063, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823063 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03471157907953425, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03471157907953425 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.01559475363200651, + "mc2": 0.42820899219390524, + "mc2_stderr": 0.014856416829616066 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077307, + "acc_norm": 0.6080283353010626, + "acc_norm_stderr": 0.016784332119424088 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v5", + "model_sha": "c1190fbd8699492d5e87775303df899f326f47da", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v6/result_2023-11-29 06:51:23.json b/PracticeLLM/Custom-KoLLM-13B-v6/result_2023-11-29 06:51:23.json new file mode 100644 index 0000000000000000000000000000000000000000..dee84f024c92701e67e6064f518ef3a5e4d452c2 --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v6/result_2023-11-29 06:51:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39334470989761094, + "acc_stderr": 0.014275101465693024, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633832 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4245170284803824, + "acc_stderr": 0.00493259334881362, + "acc_norm": 0.5660227046405099, + "acc_norm_stderr": 0.004946089230153022 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 
0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.017829131764287187, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.017829131764287187 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956278, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956278 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.025174048384000777, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.025174048384000777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230175, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230175 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5376146788990825, + "acc_stderr": 0.021376575274397576, + "acc_norm": 0.5376146788990825, + "acc_norm_stderr": 0.021376575274397576 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + 
"acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013317, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013317 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094607, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094607 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.0317229500433233, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.0317229500433233 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3122555410691004, + "acc_stderr": 0.011835798135683185, + "acc_norm": 0.3122555410691004, + "acc_norm_stderr": 0.011835798135683185 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.01565960575532691, + "mc2": 0.42310297040812733, + "mc2_stderr": 0.01480446318735434 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4498229043683589, + "acc_stderr": 0.01710357334382571, + "acc_norm": 
0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v6", + "model_sha": "4e3b471cca7e769c11dbb9f30ca3dd4256911d66", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v7/result_2023-11-30 15:59:42.json b/PracticeLLM/Custom-KoLLM-13B-v7/result_2023-11-30 15:59:42.json new file mode 100644 index 0000000000000000000000000000000000000000..bcd3cb0193b4be77c7ef29b9dbbe9bbd73ca5b94 --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v7/result_2023-11-30 15:59:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 
0.014285898292938162, + "acc_norm": 0.4590443686006826, + "acc_norm_stderr": 0.014562291073601229 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4246166102370046, + "acc_stderr": 0.004932745013072717, + "acc_norm": 0.5680143397729536, + "acc_norm_stderr": 0.004943400892881046 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5325670498084292, + "acc_stderr": 0.017841995750520867, + "acc_norm": 0.5325670498084292, + "acc_norm_stderr": 0.017841995750520867 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 
0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561063, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.02686462436675665, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.02686462436675665 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.03919415545048409, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048409 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269955, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269955 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5504587155963303, + "acc_stderr": 0.021327881417823387, + "acc_norm": 0.5504587155963303, + "acc_norm_stderr": 0.021327881417823387 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.041349130183033156, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.041349130183033156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35130718954248363, + "acc_stderr": 0.019312676065786575, + "acc_norm": 0.35130718954248363, + "acc_norm_stderr": 0.019312676065786575 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467763, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467763 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0321495214780275, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0321495214780275 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235926, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235926 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 0.011808598262503316, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630573, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522505, + "mc2": 0.4142296152328429, + "mc2_stderr": 0.014852594216061029 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5053128689492326, + "acc_stderr": 0.01718938362722971, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.016876941165045612 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v7", + "model_sha": "1fa610cc17b0a5c51c0637c98b9ac671df98c27c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No 
newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v8/result_2023-12-02 18:41:57.json b/PracticeLLM/Custom-KoLLM-13B-v8/result_2023-12-02 18:41:57.json new file mode 100644 index 0000000000000000000000000000000000000000..64741ce022dcb174ec8956486f18bb551a6c6133 --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v8/result_2023-12-02 18:41:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938167, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.014555949760496439 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42362079267078273, + "acc_stderr": 0.004931219148182245, + "acc_norm": 0.569806811392153, + "acc_norm_stderr": 0.004940911779273374 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.017829131764287187, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.017829131764287187 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840688, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840688 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009225, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009225 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.03536085947529481, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.03536085947529481 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.024939313906940774, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.024939313906940774 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.03070948699255655, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.03070948699255655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276611, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276611 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068652, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068652 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261746, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261746 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + 
"acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387296, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387296 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.01941253924203216, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.01941253924203216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280048, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280048 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 
0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3122555410691004, + "acc_stderr": 0.011835798135683185, + "acc_norm": 0.3122555410691004, + "acc_norm_stderr": 0.011835798135683185 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.039036986477484416, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.039036986477484416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.01548369193923726, + "mc2": 0.42521422618331217, + "mc2_stderr": 0.014890426369457616 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5407319952774499, + "acc_stderr": 0.01713321827653767, + "acc_norm": 0.6269185360094451, + "acc_norm_stderr": 0.01662731827513747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v8", + "model_sha": "a09ef58abf42fd2fcbf5149126bcd8d13838cc97", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/SOLAR-tail-10.7B-Merge-v1.0/result_2023-12-26 19:07:34.json b/PracticeLLM/SOLAR-tail-10.7B-Merge-v1.0/result_2023-12-26 19:07:34.json new file mode 100644 index 0000000000000000000000000000000000000000..191f403feb19f6a56caf135ae35391cc733a00fd --- /dev/null +++ b/PracticeLLM/SOLAR-tail-10.7B-Merge-v1.0/result_2023-12-26 19:07:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.013975454122756564, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38169687313284206, + "acc_stderr": 0.004848099661619702, + "acc_norm": 0.5087631945827524, + "acc_norm_stderr": 0.004989014986235632 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370608, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370608 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.017570705239256586, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.017570705239256586 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340354, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340354 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5498392282958199, + "acc_stderr": 0.028256660723360173, + "acc_norm": 0.5498392282958199, + "acc_norm_stderr": 0.028256660723360173 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03358618145732523, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03358618145732523 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + 
"acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5419354838709678, + "acc_stderr": 0.028343787250540618, + "acc_norm": 0.5419354838709678, + "acc_norm_stderr": 0.028343787250540618 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.028120966503914387, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.028120966503914387 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524572, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524572 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.032357437893550445, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.032357437893550445 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155247, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155247 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": 
{ + "acc": 0.5809248554913294, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.027586006221607708, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.027586006221607708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583703, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583703 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.02070745816435298, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.02070745816435298 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.028555827516528777, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.028555827516528777 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46895424836601307, + "acc_stderr": 0.020188804456361887, + "acc_norm": 0.46895424836601307, + "acc_norm_stderr": 0.020188804456361887 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.02889395541211589, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.02889395541211589 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.03409386946992699, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.03409386946992699 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152608, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, 
+ "acc_stderr": 0.030254372573976694, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.030381931949990407, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.030381931949990407 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38852672750977835, + "acc_stderr": 0.012448817838292364, + "acc_norm": 0.38852672750977835, + "acc_norm_stderr": 0.012448817838292364 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.03465868196380762, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.03465868196380762 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380027, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380027 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.46871726153267024, + "mc2_stderr": 0.016338202358424335 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4982290436835891, + "acc_stderr": 0.01719024627623186, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/SOLAR-tail-10.7B-Merge-v1.0", + "model_sha": "92349666d0209524a920adefafad53f82aecfee8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/SOLAR-tail-10.7B-instruct-v1.0/result_2023-12-28 15:26:44.json b/PracticeLLM/SOLAR-tail-10.7B-instruct-v1.0/result_2023-12-28 15:26:44.json new file mode 100644 index 0000000000000000000000000000000000000000..e35a90b6f3602d5d2d08243bfdc5f8dd1f7d5607 --- /dev/null +++ b/PracticeLLM/SOLAR-tail-10.7B-instruct-v1.0/result_2023-12-28 15:26:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40955631399317405, + "acc_stderr": 0.014370358632472428, + "acc_norm": 0.46928327645051193, + "acc_norm_stderr": 0.014583792546304037 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4307906791475802, + "acc_stderr": 0.0049417488176823, + "acc_norm": 0.5818562039434375, + "acc_norm_stderr": 0.004922459820434773 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.04721188506097172, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.04721188506097172 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6309067688378033, + "acc_stderr": 0.017256283109124634, + "acc_norm": 0.6309067688378033, + "acc_norm_stderr": 0.017256283109124634 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.502127659574468, + "acc_stderr": 0.03268572658667493, + "acc_norm": 0.502127659574468, + "acc_norm_stderr": 0.03268572658667493 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5755627009646302, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.5755627009646302, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + 
"acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.03135305009533086, + "acc_norm": 0.7373737373737373, + "acc_norm_stderr": 0.03135305009533086 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.02534267129380724, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.02534267129380724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.02762171783290703, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.02762171783290703 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.026246772946890488, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.026246772946890488 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.038118909889404126, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.038118909889404126 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.02530590624159064, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.02530590624159064 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.04161402398403279, + 
"acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.026261677607806642, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5895061728395061, + "acc_stderr": 0.027371350925124764, + "acc_norm": 0.5895061728395061, + "acc_norm_stderr": 0.027371350925124764 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6788990825688074, + "acc_stderr": 0.020018149772733744, + "acc_norm": 0.6788990825688074, + "acc_norm_stderr": 0.020018149772733744 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5424836601307189, + "acc_stderr": 0.028526383452142635, + "acc_norm": 0.5424836601307189, + "acc_norm_stderr": 0.028526383452142635 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.039849796533028704, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.039849796533028704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4918300653594771, + "acc_stderr": 0.020225134343057265, + "acc_norm": 0.4918300653594771, + "acc_norm_stderr": 0.020225134343057265 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.02946218923337059, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.02946218923337059 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21787709497206703, + "acc_stderr": 0.013806211780732986, + "acc_norm": 0.21787709497206703, + "acc_norm_stderr": 0.013806211780732986 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4963235294117647, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.4963235294117647, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.02981802474975309, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.02981802474975309 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3970013037809648, + "acc_stderr": 0.012496346982909553, + "acc_norm": 0.3970013037809648, + "acc_norm_stderr": 0.012496346982909553 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6127450980392157, + "acc_stderr": 0.034189312338333444, + "acc_norm": 0.6127450980392157, + "acc_norm_stderr": 0.034189312338333444 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.036462049632538136, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.036462049632538136 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.016185744355144912, + "mc2": 0.46519091505563187, + "mc2_stderr": 0.015560577374692961 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.017142736117643304 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/SOLAR-tail-10.7B-instruct-v1.0", + "model_sha": "89df820084202a2da014491ee6ebe5c9f8ff9004", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Twice-KoSOLAR-16.1B-test/result_2023-12-30 03:41:23.json b/PracticeLLM/Twice-KoSOLAR-16.1B-test/result_2023-12-30 03:41:23.json new file mode 100644 index 0000000000000000000000000000000000000000..1f42af4dca902beac4c12b392847cd9e662ca954 --- /dev/null +++ b/PracticeLLM/Twice-KoSOLAR-16.1B-test/result_2023-12-30 03:41:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3984641638225256, + "acc_stderr": 0.014306946052735563, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.014555949760496437 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41734714200358497, + "acc_stderr": 0.0049211338649318885, + "acc_norm": 0.571400119498108, + "acc_norm_stderr": 0.004938643787869551 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6081871345029239, + "acc_stderr": 0.03743979825926399, + "acc_norm": 0.6081871345029239, + "acc_norm_stderr": 0.03743979825926399 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6360153256704981, + "acc_stderr": 0.017205684809032232, + "acc_norm": 0.6360153256704981, + "acc_norm_stderr": 0.017205684809032232 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.027604689028581993, + "acc_norm": 0.617363344051447, + 
"acc_norm_stderr": 0.027604689028581993 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.03332299921070645, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.03332299921070645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.025275892070240655, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.025275892070240655 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935427, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935427 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.6915422885572139, + 
"acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155257, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932262, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932262 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5578034682080925, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.5578034682080925, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.027237415094592488, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.027237415094592488 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.03292296639155141, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.03292296639155141 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6238532110091743, + "acc_stderr": 0.02076923196820508, + "acc_norm": 0.6238532110091743, + "acc_norm_stderr": 0.02076923196820508 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147124, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147124 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46895424836601307, + "acc_stderr": 0.020188804456361887, + "acc_norm": 0.46895424836601307, + "acc_norm_stderr": 0.020188804456361887 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596154, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596154 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.033953227263757976, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.033953227263757976 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497726, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497726 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5110294117647058, + "acc_stderr": 0.030365446477275675, + "acc_norm": 0.5110294117647058, + "acc_norm_stderr": 0.030365446477275675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6163265306122448, + "acc_stderr": 0.031130880396235946, + "acc_norm": 0.6163265306122448, + "acc_norm_stderr": 0.031130880396235946 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39504563233376794, + "acc_stderr": 0.012485727813251562, + "acc_norm": 0.39504563233376794, + "acc_norm_stderr": 0.012485727813251562 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165633, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165633 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219371, + "mc2": 0.4298875800555882, + "mc2_stderr": 0.015553207955178432 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46635182998819363, + "acc_stderr": 0.017151384117131872, + "acc_norm": 0.538370720188902, + "acc_norm_stderr": 0.017139660221845553 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, 
+ "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Twice-KoSOLAR-16.1B-test", + "model_sha": "bb2523b46fd43ed82852d295418c5243e3666d15", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.15/result_2023-12-04 03:30:04.json b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.15/result_2023-12-04 03:30:04.json new file mode 100644 index 0000000000000000000000000000000000000000..b39b2e49527f6c0f6d763a976e3e3ac799b82a58 --- /dev/null +++ b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.15/result_2023-12-04 03:30:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3984641638225256, + "acc_stderr": 0.014306946052735567, + "acc_norm": 0.4539249146757679, + "acc_norm_stderr": 0.014549221105171864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4246166102370046, + "acc_stderr": 0.004932745013072715, + "acc_norm": 0.5688109938259311, + "acc_norm_stderr": 0.004942302768002102 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394223, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394223 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 
0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.03536085947529481, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.03536085947529481 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102308, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102308 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.0305032920133426, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.0305032920133426 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4653179190751445, + "acc_stderr": 0.026854257928258886, + "acc_norm": 0.4653179190751445, + "acc_norm_stderr": 0.026854257928258886 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5559633027522936, + "acc_stderr": 0.021302621211654518, + "acc_norm": 0.5559633027522936, + "acc_norm_stderr": 0.021302621211654518 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.027732834353363947, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.027732834353363947 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 
0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.019373332420724507, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.019373332420724507 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320196, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320196 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755805, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755805 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.032259413526312945, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.032259413526312945 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714847, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714847 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.333116036505867, + "acc_stderr": 0.01203793045151205, + "acc_norm": 0.333116036505867, + "acc_norm_stderr": 0.01203793045151205 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.0158663464013843, + "mc2": 0.4683667146519244, + "mc2_stderr": 0.014981879654024812 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.01717730199234255, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.15", + "model_sha": "db132e6067a6392c99b4bf6d7afda85c7d5f78a8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.18/result_2023-12-07 05:18:25.json b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.18/result_2023-12-07 05:18:25.json new file mode 100644 index 0000000000000000000000000000000000000000..92d2de8c19ae452f31c69a7774271cfcda797f81 --- /dev/null +++ b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.18/result_2023-12-07 05:18:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180647, + "acc_norm": 0.4590443686006826, + "acc_norm_stderr": 0.014562291073601226 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4246166102370046, + "acc_stderr": 0.004932745013072719, + "acc_norm": 0.571400119498108, + "acc_norm_stderr": 0.004938643787869549 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 
0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865636, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865636 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4128205128205128, + "acc_stderr": 0.024962683564331827, + "acc_norm": 0.4128205128205128, + "acc_norm_stderr": 0.024962683564331827 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684973, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684973 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + 
"acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.0478200179138006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712177, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712177 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602585, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602585 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 
0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.01964380155792481, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.01964380155792481 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.040598672469526864, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.040598672469526864 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.0316746870682898, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.0316746870682898 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714857, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714857 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.012043812655846147, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.012043812655846147 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.015744027248256055, + "mc2": 0.4544067221641174, + "mc2_stderr": 0.014927896908949237 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.01717730199234255, + "acc_norm": 0.5914994096812278, + "acc_norm_stderr": 0.016900062879427122 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.18", + "model_sha": "be58129e9338fbdc42bfc803860d4308f835cd6e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.2/result_2023-11-29 11:13:19.json b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.2/result_2023-11-29 11:13:19.json new file mode 100644 index 0000000000000000000000000000000000000000..0b11b5f1ecf574344bbf93c4545394f7ead73402 --- /dev/null +++ b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.2/result_2023-11-29 11:13:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142818, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 
0.014537144444284738 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41884086835291773, + "acc_stderr": 0.004923609207861539, + "acc_norm": 0.5671181039633539, + "acc_norm_stderr": 0.004944620712318274 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5095785440613027, + "acc_stderr": 0.01787668227534085, + "acc_norm": 0.5095785440613027, + "acc_norm_stderr": 0.01787668227534085 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262971, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262971 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + 
"acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4129032258064516, + "acc_stderr": 0.02800913812540039, + "acc_norm": 0.4129032258064516, + "acc_norm_stderr": 0.02800913812540039 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115205, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115205 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.035281314729336065, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0365634365335316, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0365634365335316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206174, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272438, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272438 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 
0.03999423879281337, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281337 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.021436998359765317, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.021436998359765317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924314, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924314 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.028074158947600666, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.028074158947600666 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223974, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223974 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.03038805130167812, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.03038805130167812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3070404172099087, + "acc_stderr": 0.01178095911451378, + "acc_norm": 0.3070404172099087, + "acc_norm_stderr": 0.01178095911451378 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.01522589934082683, + "mc2": 0.39177380761625485, + "mc2_stderr": 0.014625221380747738 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.017177301992342558, + "acc_norm": 0.5584415584415584, + "acc_norm_stderr": 0.017072525875563103 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.2", + "model_sha": "e61e6122ceca6995569c008901a3e1d4a0a58972", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.20/result_2023-12-07 11:05:08.json b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.20/result_2023-12-07 11:05:08.json new file mode 100644 index 0000000000000000000000000000000000000000..8486f8909fd6dbd1ef1116fb09517249fe30bbef --- /dev/null +++ b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.20/result_2023-12-07 11:05:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40955631399317405, + "acc_stderr": 0.014370358632472427, + "acc_norm": 0.4667235494880546, + "acc_norm_stderr": 0.014578995859605814 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4279028082055367, + "acc_stderr": 0.004937635112830286, + "acc_norm": 0.5790679147580163, + "acc_norm_stderr": 0.004926996830194243 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.545338441890166, + "acc_stderr": 0.0178063045850526, + "acc_norm": 0.545338441890166, + "acc_norm_stderr": 0.0178063045850526 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710852, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710852 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849734, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.031342504862454025, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.031342504862454025 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.021136376504030874, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.021136376504030874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891776, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891776 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320186, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320186 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.0316746870682898, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.0316746870682898 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159685, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159685 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330371, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330371 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454892, + "mc2": 0.46011374521819187, + "mc2_stderr": 0.014998848301007965 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.01716818720142925, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.20", + "model_sha": "ea626222a55229e517e4c9f75ba9bbd64cd892a5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/StatPan/mistral7b-bartending-recipe-v1/result_2023-12-29 07:50:46.json b/StatPan/mistral7b-bartending-recipe-v1/result_2023-12-29 07:50:46.json new file mode 100644 index 0000000000000000000000000000000000000000..8fa827b48d613b3671564dca2387ddfadced4493 --- /dev/null +++ b/StatPan/mistral7b-bartending-recipe-v1/result_2023-12-29 07:50:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4249146757679181, + "acc_stderr": 0.014445698968520769, + "acc_norm": 0.5, + "acc_norm_stderr": 0.014611390804670088 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41724756024696275, + "acc_stderr": 0.004920967192255291, + "acc_norm": 0.5492929695279825, + "acc_norm_stderr": 0.004965473894646782 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.561941251596424, + "acc_stderr": 0.017742232238257258, + "acc_norm": 0.561941251596424, + "acc_norm_stderr": 0.017742232238257258 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5434083601286174, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.5434083601286174, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364397, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364397 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.025317649726448677, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.025317649726448677 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.535483870967742, + "acc_stderr": 0.02837228779796294, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.02837228779796294 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.028447965476231022, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.028447965476231022 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808086, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808086 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137602, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 
0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.02665880027367238, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.02665880027367238 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5889908256880734, + "acc_stderr": 0.021095050687277656, + "acc_norm": 0.5889908256880734, + "acc_norm_stderr": 0.021095050687277656 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309172, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309172 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.02006287424353913, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.02006287424353913 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.02872386385328128, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.02872386385328128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653062, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653062 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103986, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103986 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.030862144921087565, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.030862144921087565 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6919831223628692, + "acc_stderr": 0.03005238933560569, + "acc_norm": 0.6919831223628692, + "acc_norm_stderr": 0.03005238933560569 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3546284224250326, + "acc_stderr": 0.01221857643909016, + "acc_norm": 0.3546284224250326, + "acc_norm_stderr": 0.01221857643909016 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3378212974296206, + "mc1_stderr": 0.016557167322516896, + "mc2": 0.5219156106272662, + "mc2_stderr": 0.015613264148505234 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45218417945690675, + "acc_stderr": 0.017111567130916785, + "acc_norm": 0.46162927981109797, + "acc_norm_stderr": 0.01713966022184556 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "StatPan/mistral7b-bartending-recipe-v1", + "model_sha": "5fc07b540bbec555260205e3a9005f55806703da", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/StatPan/singung-dpo-v0.1-2200/result_2023-12-26 13:29:32.json b/StatPan/singung-dpo-v0.1-2200/result_2023-12-26 13:29:32.json new file mode 100644 index 0000000000000000000000000000000000000000..85b78d95bc833a80e0fe244fbd65c168626d54ec --- /dev/null +++ b/StatPan/singung-dpo-v0.1-2200/result_2023-12-26 13:29:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.014111298751674948, + "acc_norm": 0.41552901023890787, + "acc_norm_stderr": 0.0144013666412164 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37950607448715395, + "acc_stderr": 0.004842723234022034, + "acc_norm": 0.48207528380800635, + "acc_norm_stderr": 0.004986573992451693 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.03815827365913236, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.03815827365913236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4521072796934866, + "acc_stderr": 0.017797751493865623, + "acc_norm": 0.4521072796934866, + "acc_norm_stderr": 0.017797751493865623 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.028173917761762878, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.028173917761762878 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.37404580152671757, + 
"acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.035534363688280626, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.035534363688280626 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.040824829046386284, + "acc_norm": 0.4, + "acc_norm_stderr": 0.040824829046386284 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.13725490196078433, + "acc_stderr": 0.03424084669891521, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.03424084669891521 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296546, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296546 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347357, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347357 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556538, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556538 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083015, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083015 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987053, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987053 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4925373134328358, + "acc_stderr": 0.035351400842767194, + "acc_norm": 0.4925373134328358, + "acc_norm_stderr": 0.035351400842767194 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.02450877752102841, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 
0.02450877752102841 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269952, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269952 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.02137049460999509, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.02137049460999509 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.039531733777491945, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.039531733777491945 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.019576953122088844, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.019576953122088844 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402544, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402544 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468648, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468648 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.39, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.01196531153657153, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.01196531153657153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237258, + "mc2": 0.45912122204156075, + "mc2_stderr": 0.016449709945328097 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.448642266824085, + "acc_stderr": 0.017099430514725785, + "acc_norm": 0.4970484061393152, + "acc_norm_stderr": 0.017190054580194694 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "StatPan/singung-dpo-v0.1-2200", + "model_sha": "cb02b9bf247ef8597485d49647c8d91675609fa2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/StatPan/singung-sft-v0.1/result_2023-12-24 13:47:09.json b/StatPan/singung-sft-v0.1/result_2023-12-24 13:47:09.json new file mode 100644 index 0000000000000000000000000000000000000000..21ed1b5407328517d54e1c06e6abd671149503de --- /dev/null +++ b/StatPan/singung-sft-v0.1/result_2023-12-24 13:47:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3728668941979522, + "acc_stderr": 0.014131176760131172, + "acc_norm": 0.4069965870307167, + "acc_norm_stderr": 0.014356399418009124 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3897629954192392, + "acc_stderr": 0.004866997110388193, + "acc_norm": 0.4962158932483569, + "acc_norm_stderr": 0.004989638507409919 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128919, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128919 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.017867695938429778, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.017867695938429778 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231004, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.03524068951567449, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.03524068951567449 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461227, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809446, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809446 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473075, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + 
"acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.024870815251057093, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.024870815251057093 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542595, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.02141822475426465, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.02141822475426465 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215937, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215937 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 
0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.01421957078810399, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.01421957078810399 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928006, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928006 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.031867859300041296, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.031867859300041296 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.03241920684693334, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.03241920684693334 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849645, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849645 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.4719141578197675, + "mc2_stderr": 0.01599361059010559 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.017107618859549353, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.017182864434998567 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "statpan/singung-sft-v0.1", + "model_sha": "10ebcfa310dafd576f4767819900d3c9c80077e0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Taekyoon/llama2-org-koen-7b/result_2023-11-17 08:17:26.json b/Taekyoon/llama2-org-koen-7b/result_2023-11-17 08:17:26.json new file mode 100644 index 0000000000000000000000000000000000000000..ec9d52d891b442faaeec6d072f65ea5a3fba68a4 --- /dev/null +++ b/Taekyoon/llama2-org-koen-7b/result_2023-11-17 08:17:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3370307167235495, + "acc_stderr": 0.013813476652902279, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892893 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3997211710814579, + "acc_stderr": 0.004888398535520494, + "acc_norm": 0.5370444134634534, + "acc_norm_stderr": 0.0049760677264325615 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.036310534964889056, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.036310534964889056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729245, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729245 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3665389527458493, + "acc_stderr": 0.01723124462679703, + "acc_norm": 0.3665389527458493, + "acc_norm_stderr": 0.01723124462679703 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 
0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745664, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745664 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071857, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071857 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.34726688102893893, + "acc_stderr": 0.027040745502307333, + "acc_norm": 0.34726688102893893, + "acc_norm_stderr": 0.027040745502307333 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.032443052830087304 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836554, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836554 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386215, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380558, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380558 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2230769230769231, + "acc_stderr": 0.02110773012724398, + "acc_norm": 0.2230769230769231, + "acc_norm_stderr": 0.02110773012724398 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567104, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.026662010578567104 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + 
"acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.34328358208955223, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.34328358208955223, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918407, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918407 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.037161774375660164, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.037161774375660164 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.025522474632121615, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.025522474632121615 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.345679012345679, + "acc_stderr": 0.026462487777001886, + "acc_norm": 0.345679012345679, + "acc_norm_stderr": 0.026462487777001886 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.031618779179354094, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.031618779179354094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28440366972477066, + "acc_stderr": 0.01934203658770259, + "acc_norm": 0.28440366972477066, + "acc_norm_stderr": 0.01934203658770259 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102147, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102147 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.027245613047215365, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.027245613047215365 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.371900826446281, + 
"acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663137, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663137 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.032149521478027486, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.032149521478027486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.02315746830855936, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.02315746830855936 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.029923100563683906, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.029923100563683906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.02917868230484255, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.02917868230484255 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2848761408083442, + "acc_stderr": 0.011527830846368999, + "acc_norm": 0.2848761408083442, + "acc_norm_stderr": 0.011527830846368999 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399813, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399813 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.4237266628764529, + "mc2_stderr": 0.01496751362237835 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2727272727272727, + "acc_stderr": 0.01531185311030035, + "acc_norm": 0.44391971664698937, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Taekyoon/llama2-org-koen-7b", + "model_sha": "869813335f48ec6a8af01c793c0e8705886d3b89", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/The-matt/llama2_ko-7b_distinctive-snowflake-182_1060/result_2023-11-20 01:48:08.json b/The-matt/llama2_ko-7b_distinctive-snowflake-182_1060/result_2023-11-20 01:48:08.json new file mode 100644 index 0000000000000000000000000000000000000000..aa5bc87f1c3bc9958c7bdfe209512c8d35207742 --- /dev/null +++ b/The-matt/llama2_ko-7b_distinctive-snowflake-182_1060/result_2023-11-20 01:48:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3054607508532423, + "acc_stderr": 0.013460080478002494, + "acc_norm": 0.3583617747440273, + "acc_norm_stderr": 0.014012883334859859 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3761202947619996, + "acc_stderr": 0.004834207964061325, + "acc_norm": 0.4910376419040032, + "acc_norm_stderr": 0.004988979750014442 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + 
"acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3282247765006386, + "acc_stderr": 0.01679168564019289, + "acc_norm": 0.3282247765006386, + "acc_norm_stderr": 0.01679168564019289 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.251063829787234, + "acc_stderr": 0.02834696377716245, + "acc_norm": 0.251063829787234, + "acc_norm_stderr": 0.02834696377716245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.034106466140718564, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.034106466140718564 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 0.026730620728004917, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004917 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.29596412556053814, + "acc_stderr": 0.0306365913486998, + "acc_norm": 0.29596412556053814, + "acc_norm_stderr": 0.0306365913486998 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03960933549451207, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03960933549451207 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02755361446786382, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02755361446786382 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2076923076923077, + "acc_stderr": 0.0205675395672468, + "acc_norm": 0.2076923076923077, + "acc_norm_stderr": 0.0205675395672468 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.025091892378859275, + 
"acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.025091892378859275 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.03142616993791925, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.03142616993791925 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594316, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577657, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577657 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388676992, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388676992 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868052, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868052 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877795, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877795 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26238532110091745, + "acc_stderr": 0.018861885021534738, + "acc_norm": 0.26238532110091745, + "acc_norm_stderr": 0.018861885021534738 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1349206349206349, + "acc_stderr": 0.030557101589417515, + "acc_norm": 
0.1349206349206349, + "acc_norm_stderr": 0.030557101589417515 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137283, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137283 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.025767252010855963, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.025767252010855963 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.026882144922307744, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.026882144922307744 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.03058732629470236, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.03058732629470236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27835723598435463, + "acc_stderr": 0.011446990197380989, + "acc_norm": 0.27835723598435463, + "acc_norm_stderr": 0.011446990197380989 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.0340150671524904, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.0340150671524904 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.0151274270965207, + "mc2": 0.3908977745790188, + "mc2_stderr": 0.014711493002685353 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3305785123966942, + "acc_stderr": 0.0161734232988457, + "acc_norm": 
0.4604486422668241, + "acc_norm_stderr": 0.01713648762604985 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "The-matt/llama2_ko-7b_distinctive-snowflake-182_1060", + "model_sha": "090368cb655024491c0c4dad13f8ac9a8e7d31cc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/The-matt/llama2_ko-7b_sandy-fire-170_1530/result_2023-11-13 07:13:52.json b/The-matt/llama2_ko-7b_sandy-fire-170_1530/result_2023-11-13 07:13:52.json new file mode 100644 index 0000000000000000000000000000000000000000..e8a7a6c5f78c0d4fe554e6f26bebb629cb531f3e --- /dev/null +++ b/The-matt/llama2_ko-7b_sandy-fire-170_1530/result_2023-11-13 07:13:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.3165529010238908, + "acc_stderr": 0.013592431519068084, + "acc_norm": 0.3728668941979522, + "acc_norm_stderr": 0.014131176760131165 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37532364070902213, + "acc_stderr": 0.004832167854501651, + "acc_norm": 0.48994224258115915, + "acc_norm_stderr": 0.004988771791854509 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.01685739124747255, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.01685739124747255 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501116, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501116 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3729903536977492, + "acc_stderr": 0.02746661021314012, + "acc_norm": 0.3729903536977492, + "acc_norm_stderr": 0.02746661021314012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.273542600896861, + "acc_stderr": 0.029918586707798834, + "acc_norm": 0.273542600896861, + "acc_norm_stderr": 0.029918586707798834 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438014, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438014 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.13725490196078433, + "acc_stderr": 0.0342408466989152, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.0342408466989152 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176892, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176892 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.020932445774463182, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.020932445774463182 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + 
"acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.02598850079241188, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.02598850079241188 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3247863247863248, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.3247863247863248, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199593, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199593 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302505, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302505 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823018, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.030965903123573033, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.030965903123573033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.1875, + "acc_stderr": 0.032639560491693344, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.032639560491693344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.02440517393578324, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.02440517393578324 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.034624199316156234, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.034624199316156234 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.31790123456790126, + "acc_stderr": 0.025910063528240865, + "acc_norm": 0.31790123456790126, + "acc_norm_stderr": 0.025910063528240865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877793, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877793 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3100917431192661, + "acc_stderr": 0.01983084968443975, + "acc_norm": 0.3100917431192661, + "acc_norm_stderr": 0.01983084968443975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.026857294663281413, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.026857294663281413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31209150326797386, + "acc_stderr": 0.01874501120127766, + "acc_norm": 0.31209150326797386, + "acc_norm_stderr": 0.01874501120127766 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432403, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012376, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012376 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841195, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.029696338713422882, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.029696338713422882 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2653194263363755, + "acc_stderr": 0.011276198843958876, + "acc_norm": 0.2653194263363755, + "acc_norm_stderr": 0.011276198843958876 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511785, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511785 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156475, + "mc2": 0.3966274374680779, + "mc2_stderr": 0.014846518193358589 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3695395513577332, + "acc_stderr": 0.01659488340568542, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.01718401506040145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "The-matt/llama2_ko-7b_sandy-fire-170_1530", + "model_sha": "b963fcf8d7249c3f360ccfa5db70c0b20bddeb08", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} 
\ No newline at end of file diff --git a/The-matt/llama2_ko-7b_stilted-lion-205_1530/result_2023-11-23 01:33:10.json b/The-matt/llama2_ko-7b_stilted-lion-205_1530/result_2023-11-23 01:33:10.json new file mode 100644 index 0000000000000000000000000000000000000000..5f8278b9e10027778090a2d6d397abb73b87fb20 --- /dev/null +++ b/The-matt/llama2_ko-7b_stilted-lion-205_1530/result_2023-11-23 01:33:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31569965870307165, + "acc_stderr": 0.013582571095815291, + "acc_norm": 0.36945392491467577, + "acc_norm_stderr": 0.014104578366491902 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3768173670583549, + "acc_stderr": 0.004835981632401594, + "acc_norm": 0.4987054371639116, + "acc_norm_stderr": 0.004989764686738838 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3128991060025543, + "acc_stderr": 0.016580935940304055, + "acc_norm": 0.3128991060025543, + "acc_norm_stderr": 0.016580935940304055 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.02802022627120022, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.02802022627120022 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3247588424437299, + "acc_stderr": 0.026596782287697046, + "acc_norm": 0.3247588424437299, + "acc_norm_stderr": 0.026596782287697046 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2914798206278027, + "acc_stderr": 0.030500283176545906, + "acc_norm": 0.2914798206278027, + "acc_norm_stderr": 0.030500283176545906 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185555, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185555 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341947, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 
0.028510251512341947 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 0.021193632525148522, + "acc_norm": 0.22564102564102564, + "acc_norm_stderr": 0.021193632525148522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3418803418803419, + "acc_stderr": 0.031075028526507755, + "acc_norm": 0.3418803418803419, + "acc_norm_stderr": 0.031075028526507755 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.028985455652334395, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.028985455652334395 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.36318407960199006, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.36318407960199006, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788989, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788989 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252603, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252603 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.024027745155265016, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.024027745155265016 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615625, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615625 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.2654320987654321, + "acc_stderr": 0.024569223600460845, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.024569223600460845 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.033088185944157494, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.033088185944157494 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3467889908256881, + "acc_stderr": 0.020406097104093027, + "acc_norm": 0.3467889908256881, + "acc_norm_stderr": 0.020406097104093027 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.033954900208561116, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.033954900208561116 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.027475969910660952, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.027475969910660952 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.044492703500683815, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.044492703500683815 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.01716058723504635, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.01716058723504635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290403, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03004261583271486, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03004261583271486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.31223628691983124, + "acc_stderr": 0.03016513786784701, + "acc_norm": 0.31223628691983124, + "acc_norm_stderr": 0.03016513786784701 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2757496740547588, + "acc_stderr": 0.011413813609161005, + "acc_norm": 0.2757496740547588, + "acc_norm_stderr": 0.011413813609161005 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.03198001660115073, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.03198001660115073 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.3905558403820087, + "mc2_stderr": 0.014722115029998253 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33766233766233766, + "acc_stderr": 0.01625907578475496, + "acc_norm": 0.4639905548996458, + "acc_norm_stderr": 0.017145715365486654 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 
1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "The-matt/llama2_ko-7b_stilted-lion-205_1530", + "model_sha": "948480784c612e413d857c89d2a343b32c704498", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/TheBloke/Llama-2-13B-fp16/result_2023-09-27 04:58:32.json b/TheBloke/Llama-2-13B-fp16/result_2023-09-27 04:58:32.json new file mode 100644 index 0000000000000000000000000000000000000000..f24caff54f7f326da7a536d77030aded372e3f46 --- /dev/null +++ b/TheBloke/Llama-2-13B-fp16/result_2023-09-27 04:58:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.013621696119173302, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407166 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36138219478191597, + "acc_stderr": 0.004794191785967945, + "acc_norm": 0.46614220274845647, + "acc_norm_stderr": 0.004978328190775522 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4789272030651341, + "acc_stderr": 0.0178640767862129, + "acc_norm": 0.4789272030651341, + "acc_norm_stderr": 0.0178640767862129 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665232, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665232 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 
0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.03222414045241107, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.03222414045241107 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3849056603773585, + "acc_stderr": 0.029946498567699945, + "acc_norm": 0.3849056603773585, + "acc_norm_stderr": 0.029946498567699945 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276611, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276611 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + 
"acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.0272725828498398, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.0272725828498398 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384486, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584926, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584926 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44770642201834865, + "acc_stderr": 0.021319754962425462, + "acc_norm": 0.44770642201834865, + "acc_norm_stderr": 0.021319754962425462 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319774, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319774 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706207, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706207 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101373, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101373 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03324708911809117, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.03324708911809117 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + 
"acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.39662447257383965, + "acc_stderr": 0.03184399873811226, + "acc_norm": 0.39662447257383965, + "acc_norm_stderr": 0.03184399873811226 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31681877444589307, + "acc_stderr": 0.011882349954723015, + "acc_norm": 0.31681877444589307, + "acc_norm_stderr": 0.011882349954723015 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482466, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482466 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.41395274449910313, + "mc2_stderr": 0.015033140507060082 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3825265643447462, + "acc_stderr": 0.016709165387228806, + "acc_norm": 0.4781582054309327, + "acc_norm_stderr": 0.017173944474294378 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "TheBloke/Llama-2-13B-fp16", + "model_sha": "b2e65e8ad4bb35e5abaee0170ebd5fc2134a50bb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/TinyLlama/TinyLlama-1.1B-intermediate-step-715k-1.5T/result_2023-11-16 08:16:14.json b/TinyLlama/TinyLlama-1.1B-intermediate-step-715k-1.5T/result_2023-11-16 08:16:14.json new file mode 100644 index 0000000000000000000000000000000000000000..7e5ad50ce5571df5dbca09da85705b2c285b905e --- /dev/null +++ b/TinyLlama/TinyLlama-1.1B-intermediate-step-715k-1.5T/result_2023-11-16 08:16:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2167235494880546, + "acc_stderr": 0.012040156713481192, + "acc_norm": 0.25597269624573377, + "acc_norm_stderr": 0.012753013241244513 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2819159529974109, + "acc_stderr": 0.004490130691020431, + "acc_norm": 0.3150766779525991, + "acc_norm_stderr": 0.004635970060392421 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2046783625730994, + "acc_stderr": 0.03094445977853321, + "acc_norm": 0.2046783625730994, + "acc_norm_stderr": 0.03094445977853321 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26436781609195403, + "acc_stderr": 0.01576998484069053, + "acc_norm": 0.26436781609195403, + "acc_norm_stderr": 0.01576998484069053 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.1925925925925926, + "acc_stderr": 0.03406542058502652, + "acc_norm": 0.1925925925925926, + "acc_norm_stderr": 0.03406542058502652 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.029513196625539355, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.029513196625539355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322416, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322416 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.025583062489984838, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.025583062489984838 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.031602951437766785, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.031602951437766785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 
0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1919191919191919, + "acc_stderr": 0.02805779167298901, + "acc_norm": 0.1919191919191919, + "acc_norm_stderr": 0.02805779167298901 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.0395058186117996, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.0395058186117996 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.02738140692786898, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.02738140692786898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.16748768472906403, + "acc_stderr": 0.026273086047535414, + "acc_norm": 0.16748768472906403, + "acc_norm_stderr": 0.026273086047535414 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.20967741935483872, + "acc_stderr": 0.02315787934908353, + "acc_norm": 0.20967741935483872, + "acc_norm_stderr": 0.02315787934908353 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.02974504857267406, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.02974504857267406 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.02461829819586651, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02461829819586651 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959312, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959312 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21393034825870647, + "acc_stderr": 0.028996909693328903, + "acc_norm": 0.21393034825870647, + "acc_norm_stderr": 0.028996909693328903 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1791907514450867, + "acc_stderr": 0.02924251305906329, + "acc_norm": 0.1791907514450867, + "acc_norm_stderr": 0.02924251305906329 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.22486772486772486, + "acc_stderr": 0.021502096078229147, + "acc_norm": 0.22486772486772486, + 
"acc_norm_stderr": 0.021502096078229147 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577615, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577615 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.03322015795776742, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.03322015795776742 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23765432098765432, + "acc_stderr": 0.023683591837008553, + "acc_norm": 0.23765432098765432, + "acc_norm_stderr": 0.023683591837008553 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1908256880733945, + "acc_stderr": 0.016847676400091115, + "acc_norm": 0.1908256880733945, + "acc_norm_stderr": 0.016847676400091115 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.0339549002085611, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.0339549002085611 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.040261875275912073, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.040261875275912073 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.017322789207784326, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.017322789207784326 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432403, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859655, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859655 
+ }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976266, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976266 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.02423101337054111, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.02423101337054111 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17959183673469387, + "acc_stderr": 0.024573293589585637, + "acc_norm": 0.17959183673469387, + "acc_norm_stderr": 0.024573293589585637 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23533246414602346, + "acc_stderr": 0.010834432543912224, + "acc_norm": 0.23533246414602346, + "acc_norm_stderr": 0.010834432543912224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869326 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559693, + "mc2": 0.5030438206753587, + "mc2_stderr": 0.016137949960889377 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24203069657615112, + "acc_stderr": 0.014725696750525331, + "acc_norm": 0.3105076741440378, + "acc_norm_stderr": 0.01590800452876203 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "TinyLlama/TinyLlama-1.1B-intermediate-step-715k-1.5T", + "model_sha": "314e0f65d90384e224ac8d7c0b228a661a06673f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/TinyPixel/Llama-2-7B-bf16-sharded/result_2023-12-26 01:06:07.json b/TinyPixel/Llama-2-7B-bf16-sharded/result_2023-12-26 01:06:07.json new file mode 100644 index 0000000000000000000000000000000000000000..c01e29366a36a3c81688733ef99f47fbed125cba --- /dev/null +++ b/TinyPixel/Llama-2-7B-bf16-sharded/result_2023-12-26 01:06:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2713310580204778, + "acc_stderr": 0.012993807727545789, + "acc_norm": 0.3097269624573379, + "acc_norm_stderr": 0.013512058415238361 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3331009759012149, + "acc_stderr": 0.004703590558552501, + "acc_norm": 0.41127265484963155, + "acc_norm_stderr": 0.004910588449330016 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.04541609446503947, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.04541609446503947 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.017268607560005773, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.017268607560005773 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785138, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785138 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761923, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761923 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071857, + "acc_norm": 0.25903614457831325, + 
"acc_norm_stderr": 0.03410646614071857 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.02666441088693762, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.02666441088693762 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424387, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424387 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416545, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416545 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886845, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886845 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30256410256410254, + "acc_stderr": 0.023290888053772725, + "acc_norm": 0.30256410256410254, + "acc_norm_stderr": 0.023290888053772725 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567104, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.026662010578567104 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.49145299145299143, + "acc_stderr": 0.032751303000970296, + "acc_norm": 0.49145299145299143, + "acc_norm_stderr": 0.032751303000970296 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30566037735849055, + "acc_stderr": 0.02835329807332267, + "acc_norm": 0.30566037735849055, + "acc_norm_stderr": 0.02835329807332267 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505415, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505415 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230172, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230172 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.033367670865679766, + "acc_norm": 0.2119205298013245, + 
"acc_norm_stderr": 0.033367670865679766 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4527363184079602, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.4527363184079602, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633356, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633356 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.025992472029306386, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.025992472029306386 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.36728395061728397, + "acc_stderr": 0.02682280175950789, + "acc_norm": 0.36728395061728397, + "acc_norm_stderr": 0.02682280175950789 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3471502590673575, + "acc_stderr": 0.03435696168361356, + "acc_norm": 0.3471502590673575, + "acc_norm_stderr": 0.03435696168361356 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28990825688073396, + "acc_stderr": 0.019453066609201604, + "acc_norm": 0.28990825688073396, + "acc_norm_stderr": 0.019453066609201604 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283683, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283683 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351586, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351586 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.018663359671463667, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.018663359671463667 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.025336848563332386, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.025336848563332386 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301854, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.031219569445301854 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.01134599674353925, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.01134599674353925 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.03713158067481912, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.03713158067481912 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006516, + "mc2": 0.43916554694905735, + "mc2_stderr": 0.015333673661914711 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27036599763872493, + "acc_stderr": 0.015270152942068405, + "acc_norm": 0.3530106257378985, + "acc_norm_stderr": 0.016430745982427126 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "TinyPixel/Llama-2-7B-bf16-sharded", + "model_sha": "3f5d08bf8c31192686e3e88d0b9d2cdeff4115e4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Trofish/KULLM-RLHF/result_2023-09-28 05:43:12.json b/Trofish/KULLM-RLHF/result_2023-09-28 05:43:12.json new file mode 100644 index 0000000000000000000000000000000000000000..183d232b971cbdbf85b4f6311d06a25c16cf3249 --- /dev/null +++ b/Trofish/KULLM-RLHF/result_2023-09-28 05:43:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2790102389078498, + "acc_stderr": 0.013106784883601352, + "acc_norm": 0.3199658703071672, + "acc_norm_stderr": 0.013631345807016198 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3842859988050189, + "acc_stderr": 0.004854318994447741, + "acc_norm": 0.4954192391953794, + "acc_norm_stderr": 0.0049895720021966876 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03565079670708313, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03565079670708313 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.039166677628225836, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.039166677628225836 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2848020434227331, + "acc_stderr": 0.016139174096522563, + "acc_norm": 0.2848020434227331, + "acc_norm_stderr": 0.016139174096522563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501116, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501116 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.03106939026078942, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.03106939026078942 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26688102893890675, + "acc_stderr": 0.025122637608816653, + "acc_norm": 0.26688102893890675, + "acc_norm_stderr": 0.025122637608816653 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.029442495585857473, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.029442495585857473 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.1984732824427481, + "acc_stderr": 0.034981493854624686, + "acc_norm": 0.1984732824427481, + "acc_norm_stderr": 0.034981493854624686 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438014, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438014 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149353, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149353 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24516129032258063, + "acc_stderr": 0.024472243840895518, + "acc_norm": 0.24516129032258063, + "acc_norm_stderr": 0.024472243840895518 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.029480360549541198, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.029480360549541198 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.026480357179895688, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.026480357179895688 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 
0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.033742355504256936, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.033742355504256936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655805, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655805 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826371, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826371 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526501, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.02402774515526501 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924034, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924034 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.025702640260603767, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.025702640260603767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.018272575810231857, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.018272575810231857 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.033954900208561116, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.033954900208561116 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 
0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.018373116915903966, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.018373116915903966 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880582, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880582 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.028765111718046937, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.028765111718046937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16911764705882354, + "acc_stderr": 0.022770868010113014, + "acc_norm": 0.16911764705882354, + "acc_norm_stderr": 0.022770868010113014 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.02783302387139968, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.02783302387139968 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24967405475880053, + "acc_stderr": 0.011054538377832318, + "acc_norm": 0.24967405475880053, + "acc_norm_stderr": 0.011054538377832318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.03149328104507957, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.03149328104507957 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268047, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268047 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.38771109052404834, + "mc2_stderr": 0.014784638195990142 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31641086186540734, + "acc_stderr": 0.015989617951065477, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.016836377292849296 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Trofish/KULLM-RLHF", + "model_sha": "ba40edd22e913ab8170e3c78035d8d9057d31fba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Walmart-the-bag/Yi-6B-Infinity-Chat/result_2023-12-24 01:33:21.json b/Walmart-the-bag/Yi-6B-Infinity-Chat/result_2023-12-24 01:33:21.json new file mode 100644 index 0000000000000000000000000000000000000000..4bd17b2132abdee055d9283d3d3c251115e3baac --- /dev/null +++ b/Walmart-the-bag/Yi-6B-Infinity-Chat/result_2023-12-24 01:33:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.22696245733788395, + "acc_stderr": 0.012240491536132873, + "acc_norm": 0.26535836177474403, + "acc_norm_stderr": 0.012902554762313967 + }, + "harness|ko_hellaswag|10": { + "acc": 0.30521808404700257, + "acc_stderr": 0.00459558602758378, + "acc_norm": 0.35610436168094006, + "acc_norm_stderr": 0.004778679507786504 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259264016, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.037439798259264016 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.49514563106796117, + "acc_stderr": 0.04950504382128919, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128919 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3524904214559387, + "acc_stderr": 0.017084150244081373, + "acc_norm": 0.3524904214559387, + "acc_norm_stderr": 0.017084150244081373 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.02924188386962881, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.02924188386962881 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40514469453376206, + "acc_stderr": 0.02788238379132595, + "acc_norm": 0.40514469453376206, + "acc_norm_stderr": 0.02788238379132595 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.029442495585857473, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.029442495585857473 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.035029757994130065, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.035029757994130065 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.04161808503501528, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.04161808503501528 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33589743589743587, + "acc_stderr": 0.02394672474156397, + "acc_norm": 0.33589743589743587, + "acc_norm_stderr": 0.02394672474156397 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.3709677419354839, + 
"acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651047, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.04653429807913508, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.04653429807913508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176085, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176085 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48258706467661694, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.48258706467661694, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3765432098765432, + "acc_stderr": 0.02695934451874778, + "acc_norm": 0.3765432098765432, + "acc_norm_stderr": 0.02695934451874778 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.03561587327685885, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.03561587327685885 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.038924311065187525, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.038924311065187525 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3743119266055046, + "acc_stderr": 0.020748959408988327, + "acc_norm": 0.3743119266055046, + "acc_norm_stderr": 0.020748959408988327 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 
+ }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141114, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141114 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.019117213911495155, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.019117213911495155 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.01467625200931947, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.01467625200931947 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.03078154910202622, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.03078154910202622 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.011862561755715924, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.011862561755715924 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247272, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247272 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.016185744355144905, + "mc2": 0.4818389603284346, + "mc2_stderr": 0.016049632203664795 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3600944510035419, + "acc_stderr": 0.016503686720440076, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191385 + } + }, 
+ "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Walmart-the-bag/Yi-6B-Infinity-Chat", + "model_sha": "7a441a69e1ebd192fbf52b904589130c3875aacc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-lora-653/result_2023-12-28 18:49:53.json b/We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-lora-653/result_2023-12-28 18:49:53.json new file mode 100644 index 0000000000000000000000000000000000000000..59e35428ca8f85d6ce6a261f40a887b2048ee555 --- /dev/null +++ b/We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-lora-653/result_2023-12-28 18:49:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44283276450511944, + "acc_stderr": 0.014515573873348899, + 
"acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.014600132075947087 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44742083250348536, + "acc_stderr": 0.004962115526014299, + "acc_norm": 0.6081457876916949, + "acc_norm_stderr": 0.004871667371060538 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6628352490421456, + "acc_stderr": 0.016905207420803547, + "acc_norm": 0.6628352490421456, + "acc_norm_stderr": 0.016905207420803547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4978723404255319, + "acc_stderr": 0.032685726586674915, + "acc_norm": 0.4978723404255319, + "acc_norm_stderr": 0.032685726586674915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333045, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333045 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5884244372990354, + "acc_stderr": 0.027950481494401262, + "acc_norm": 0.5884244372990354, + "acc_norm_stderr": 0.027950481494401262 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.03191178226713546, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.03191178226713546 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.048108401480826346, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.048108401480826346 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.0251246535258851, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.0251246535258851 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6258064516129033, + "acc_stderr": 0.027528904299845693, + "acc_norm": 0.6258064516129033, + "acc_norm_stderr": 0.027528904299845693 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.02931820364520686, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.02931820364520686 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.032801882053486414, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.032801882053486414 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4312169312169312, + "acc_stderr": 0.025506481698138204, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.025506481698138204 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.5208333333333334, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.615606936416185, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.615606936416185, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.02723741509459248, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.02723741509459248 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.04685473041907789, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.04685473041907789 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6880733944954128, + "acc_stderr": 0.019862967976707245, + "acc_norm": 0.6880733944954128, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.028275490156791455, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.028275490156791455 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.020219083895133924, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.020219083895133924 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.0293922365846125, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.0293922365846125 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21564245810055865, + "acc_stderr": 0.013754835975482351, + "acc_norm": 0.21564245810055865, + "acc_norm_stderr": 0.013754835975482351 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5110294117647058, + "acc_stderr": 0.030365446477275668, + "acc_norm": 0.5110294117647058, + "acc_norm_stderr": 0.030365446477275668 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7763713080168776, + "acc_stderr": 0.027123298205229966, + "acc_norm": 0.7763713080168776, + "acc_norm_stderr": 0.027123298205229966 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3983050847457627, + "acc_stderr": 0.012503310565166235, + "acc_norm": 0.3983050847457627, + "acc_norm_stderr": 0.012503310565166235 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6862745098039216, + "acc_stderr": 0.03256685484460388, + "acc_norm": 0.6862745098039216, + "acc_norm_stderr": 0.03256685484460388 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.46216492501026685, + "mc2_stderr": 0.015258648943320074 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49940968122786306, + "acc_stderr": 0.01719034212344866, + "acc_norm": 0.5478158205430933, + "acc_norm_stderr": 0.0171115671309168 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-lora-653", + "model_sha": "9ba1f722d3c97105e544eb7585e4f57c4ad2fd28", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": 
null + } +} \ No newline at end of file diff --git a/We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-math/result_2023-12-30 12:27:09.json b/We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-math/result_2023-12-30 12:27:09.json new file mode 100644 index 0000000000000000000000000000000000000000..91fb45956f7b1f0d7434f822161ca9bc7e665047 --- /dev/null +++ b/We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-math/result_2023-12-30 12:27:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.45819112627986347, + "acc_stderr": 0.014560220308714691, + "acc_norm": 0.5110921501706485, + "acc_norm_stderr": 0.01460779491401305 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4522007568213503, + "acc_stderr": 0.004966928094797572, + "acc_norm": 0.6169089822744473, + "acc_norm_stderr": 0.0048514666236014505 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6564495530012772, + "acc_stderr": 0.016982145632652473, + "acc_norm": 0.6564495530012772, + "acc_norm_stderr": 0.016982145632652473 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.027731258647012, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 0.027731258647012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5919282511210763, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.5919282511210763, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270285, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270285 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6428571428571429, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.6428571428571429, + "acc_norm_stderr": 0.031124619309328177 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5282051282051282, + "acc_stderr": 0.02531063925493387, + "acc_norm": 0.5282051282051282, + "acc_norm_stderr": 0.02531063925493387 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.034953345821629324, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.034953345821629324 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6451612903225806, + "acc_stderr": 0.027218889773308774, + "acc_norm": 0.6451612903225806, + "acc_norm_stderr": 0.027218889773308774 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.026453508054040332, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.026453508054040332 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.02977384701253297, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.02977384701253297 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.03220024104534206, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.03220024104534206 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404907, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.025279850397404907 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5625, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.5625, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5982658959537572, + "acc_stderr": 0.02639410417764363, + "acc_norm": 0.5982658959537572, + "acc_norm_stderr": 0.02639410417764363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6141975308641975, + "acc_stderr": 
0.027085401226132143, + "acc_norm": 0.6141975308641975, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7137614678899082, + "acc_stderr": 0.019379436628919965, + "acc_norm": 0.7137614678899082, + "acc_norm_stderr": 0.019379436628919965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.03941897526516304, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.03941897526516304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5081699346405228, + "acc_stderr": 0.02022513434305727, + "acc_norm": 0.5081699346405228, + "acc_norm_stderr": 0.02022513434305727 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.029462189233370593, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.029462189233370593 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.18324022346368715, + "acc_stderr": 0.012938645613066388, + "acc_norm": 0.18324022346368715, + "acc_norm_stderr": 0.012938645613066388 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.030306257722468304, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.030306257722468304 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 
0.02798569938703642, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.02798569938703642 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.409387222946545, + "acc_stderr": 0.012558780895570753, + "acc_norm": 0.409387222946545, + "acc_norm_stderr": 0.012558780895570753 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.696078431372549, + "acc_stderr": 0.03228210387037892, + "acc_norm": 0.696078431372549, + "acc_norm_stderr": 0.03228210387037892 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236614, + "mc2": 0.4597238820015034, + "mc2_stderr": 0.015281888880381102 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5371900826446281, + "acc_stderr": 0.017142736117643304, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-math", + "model_sha": "2ddabd5a5902f3154a13576cf57c747141aae375", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/Yi-Ko-6B-DPO-v2/result_2023-12-27 13:41:49.json b/We-Want-GPU/Yi-Ko-6B-DPO-v2/result_2023-12-27 13:41:49.json new file mode 100644 index 0000000000000000000000000000000000000000..fcbe5f62e8a251e8ef79280003c9e8905b73bf18 --- /dev/null +++ b/We-Want-GPU/Yi-Ko-6B-DPO-v2/result_2023-12-27 13:41:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34982935153583616, + "acc_stderr": 0.013936809212158287, + "acc_norm": 0.4112627986348123, + "acc_norm_stderr": 0.014379441068522082 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40071698864767974, + "acc_stderr": 0.004890422457747264, + "acc_norm": 0.5447122087233619, + "acc_norm_stderr": 0.004969790407117537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299794, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299794 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016336, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016336 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + 
"acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207763, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207763 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.02500732988246122, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.02500732988246122 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674064, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674064 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267436, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267436 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112143, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112143 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + 
"acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.026788811931562757, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.026788811931562757 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5559633027522936, + "acc_stderr": 0.021302621211654525, + "acc_norm": 0.5559633027522936, + "acc_norm_stderr": 0.021302621211654525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.020007912739359368, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.020007912739359368 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320203, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320203 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.014487500852850426, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850426 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 
0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396563, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396563 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3220338983050847, + "acc_stderr": 0.011933936071891091, + "acc_norm": 0.3220338983050847, + "acc_norm_stderr": 0.011933936071891091 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.01600265148736101, + "mc2": 0.4398690513425212, + "mc2_stderr": 0.014823613510260732 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.538370720188902, + "acc_stderr": 0.01713966022184555, + "acc_norm": 0.6340023612750886, + "acc_norm_stderr": 0.016561489664895714 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/Yi-Ko-6B-DPO-v2", + "model_sha": "0bad3014188eae5bd260d536043b3794b0a223d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora-DPO/result_2023-12-21 01:28:33.json b/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora-DPO/result_2023-12-21 01:28:33.json new file mode 100644 index 0000000000000000000000000000000000000000..84493ea8eb2cb06a384468498fc79b50c0bd25cb --- /dev/null +++ b/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora-DPO/result_2023-12-21 01:28:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34726962457337884, + "acc_stderr": 0.013913034529620448, + "acc_norm": 0.4069965870307167, + "acc_norm_stderr": 0.014356399418009124 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4026090420235013, + "acc_stderr": 0.004894210011303206, + "acc_norm": 0.5459071898028282, + "acc_norm_stderr": 0.004968705270086754 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5798212005108557, + "acc_stderr": 0.017650651363078026, + "acc_norm": 0.5798212005108557, + "acc_norm_stderr": 0.017650651363078026 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977978, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977978 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 
0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232964, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232964 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.03238546948758979, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.03238546948758979 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539753, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539753 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.02987257770889118, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.02987257770889118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165582, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165582 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 
0.023752928712112147, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112147 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5596330275229358, + "acc_stderr": 0.02128431062376154, + "acc_norm": 0.5596330275229358, + "acc_norm_stderr": 0.02128431062376154 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.020017629214213097, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.020017629214213097 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605607, + "acc_norm": 
0.24074074074074073, + "acc_norm_stderr": 0.029157522184605607 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.01453033020146863, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.01453033020146863 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.0317229500433233, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.0317229500433233 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.01202012819598576, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.01202012819598576 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.43100948993726446, + "mc2_stderr": 0.014880256414468152 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5466351829988194, + "acc_stderr": 0.01711541822522687, + "acc_norm": 0.6233766233766234, + "acc_norm_stderr": 0.01665879987405199 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora-DPO", + "model_sha": "8ce4fd805d5ad4005edeea8ff54735ddf2a00965", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora/result_2023-12-19 12:16:36.json b/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora/result_2023-12-19 12:16:36.json new file mode 100644 index 0000000000000000000000000000000000000000..58af85d89d1f8a65473d971795208ad3bbcf6de4 --- /dev/null +++ b/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora/result_2023-12-19 12:16:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34726962457337884, + "acc_stderr": 0.013913034529620448, + "acc_norm": 0.4035836177474403, + "acc_norm_stderr": 0.014337158914268447 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4041027683728341, + "acc_stderr": 0.004897146690596249, + "acc_norm": 0.5429197371041625, + "acc_norm_stderr": 0.004971364031062591 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.048257293373563895, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.048257293373563895 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5772669220945083, + "acc_stderr": 0.01766518035195406, + "acc_norm": 0.5772669220945083, + "acc_norm_stderr": 0.01766518035195406 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511326, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511326 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + 
"acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232964, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232964 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.03023638994217309, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.03023638994217309 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 
0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.035839017547364106, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.035839017547364106 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.0240268463928735, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.0240268463928735 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5669724770642202, + "acc_stderr": 0.021244146569074338, + "acc_norm": 0.5669724770642202, + "acc_norm_stderr": 0.021244146569074338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.02807415894760066, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.020007912739359365, + "acc_norm": 0.4264705882352941, + 
"acc_norm_stderr": 0.020007912739359365 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042394, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042394 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.22685185185185186, + "acc_stderr": 0.028561650102422263, + "acc_norm": 0.22685185185185186, + "acc_norm_stderr": 0.028561650102422263 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2681564245810056, + "acc_stderr": 0.014816119635317, + "acc_norm": 0.2681564245810056, + "acc_norm_stderr": 0.014816119635317 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898435, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898435 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.011983819806464754, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464754 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.4203099661058685, + "mc2_stderr": 0.014945969463174807 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5631641086186541, + "acc_stderr": 0.017052633559856062, + "acc_norm": 0.6493506493506493, + "acc_norm_stderr": 0.016405556903893306 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora", + "model_sha": "874983de63cdc8ccd67ee342feb04a79895fa7e6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math/result_2023-12-15 15:42:33.json b/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math/result_2023-12-15 15:42:33.json new file mode 100644 index 0000000000000000000000000000000000000000..ed1a5e751a3ff7576e9f63970570f9827c7b1124 --- /dev/null +++ b/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math/result_2023-12-15 15:42:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179342, + "acc_norm": 0.39505119453924914, + "acc_norm_stderr": 0.014285898292938169 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3994224258115913, + "acc_stderr": 0.004887787255353494, + "acc_norm": 0.5222067317267477, + "acc_norm_stderr": 0.004984857671187101 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370606, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370606 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.01787574884024241, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.01787574884024241 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 
0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.0283332771095628, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.0283332771095628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.03524068951567447, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.03524068951567447 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.03175367846096625, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.03175367846096625 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347357, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347357 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.031426169937919246, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.031426169937919246 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.047245774057315705, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.047245774057315705 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.02375292871211213, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.02375292871211213 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.026720034380514998, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5229357798165137, + "acc_stderr": 0.021414757058175506, + "acc_norm": 0.5229357798165137, + "acc_norm_stderr": 0.021414757058175506 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460994, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095268, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095268 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625166, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625166 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.011862561755715938, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.011862561755715938 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.0345423658538061, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.0345423658538061 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394816, + "mc2": 0.451806214348065, + "mc2_stderr": 0.01530167301756722 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836442, + "acc_norm": 0.5478158205430933, + "acc_norm_stderr": 0.0171115671309168 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 
1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math", + "model_sha": "9e0ae0ba6a5f0bab33b892ca57727c5c63fb908a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/Yi-Ko-SFT-FULL/result_2023-12-10 23:13:29.json b/We-Want-GPU/Yi-Ko-SFT-FULL/result_2023-12-10 23:13:29.json new file mode 100644 index 0000000000000000000000000000000000000000..1b7d3b6029a63f627c36e030e774d3428982241a --- /dev/null +++ b/We-Want-GPU/Yi-Ko-SFT-FULL/result_2023-12-10 23:13:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.013659980894277378, + "acc_norm": 0.3660409556313993, + "acc_norm_stderr": 0.01407722310847014 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3783110934076877, + "acc_stderr": 0.004839746491523513, + "acc_norm": 0.49133638717386974, + "acc_norm_stderr": 0.004989032307320727 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.017862091778507866, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.017862091778507866 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.02815023224453559, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.02815023224453559 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923323, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923323 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.043820947055509894, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.043820947055509894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.03996629574876719, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.03996629574876719 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.03332769068410789, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.03332769068410789 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 
0.028040981380761554, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.028040981380761554 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.030197611600197953, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.030197611600197953 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228416, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228416 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047732, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047732 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3959537572254335, + "acc_stderr": 0.02632981334194626, + "acc_norm": 0.3959537572254335, + "acc_norm_stderr": 0.02632981334194626 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871595, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5155963302752293, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.5155963302752293, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 
0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981749, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981749 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.01954210156485411, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.01954210156485411 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536023, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536023 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976253, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976253 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898445, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30378096479791394, + "acc_stderr": 0.011745787720472458, + "acc_norm": 0.30378096479791394, + "acc_norm_stderr": 0.011745787720472458 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834559, + "mc2": 0.4291269663326706, + "mc2_stderr": 0.015080663366587326 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3412042502951594, + "acc_stderr": 0.016300368742137306, + "acc_norm": 0.41086186540731995, + "acc_norm_stderr": 
0.016914972767841062 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/Yi-Ko-SFT-FULL", + "model_sha": "f5c4893445511c2dd803297fb967d4af66a86c67", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/yi-ko-SFT-LoRA-play-re-noprompt/result_2023-12-06 08:58:22.json b/We-Want-GPU/yi-ko-SFT-LoRA-play-re-noprompt/result_2023-12-06 08:58:22.json new file mode 100644 index 0000000000000000000000000000000000000000..cf2627986e4f80be3ecce067c21b2db59f2c582c --- /dev/null +++ b/We-Want-GPU/yi-ko-SFT-LoRA-play-re-noprompt/result_2023-12-06 08:58:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3319112627986348, + "acc_stderr": 0.013760988200880534, 
+ "acc_norm": 0.3967576791808874, + "acc_norm_stderr": 0.014296513020180637 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38707428799044014, + "acc_stderr": 0.004860854240821965, + "acc_norm": 0.5099581756622187, + "acc_norm_stderr": 0.004988791687322851 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.017769250583533253, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.017769250583533253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977978, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977978 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.028386198084177687, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.028386198084177687 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + 
"acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.028441638233540515, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.028441638233540515 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.035161847729521675, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.035161847729521675 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112143, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112143 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144807, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5651376146788991, + "acc_stderr": 0.021254631465609273, + "acc_norm": 0.5651376146788991, + "acc_norm_stderr": 0.021254631465609273 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883034, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883034 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.040516463428741406, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.040516463428741406 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354154, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354154 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468636, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468636 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225418, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225418 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.03113088039623592, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.03113088039623592 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214936, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214936 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + 
"acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087298, + "mc2": 0.41549106272132785, + "mc2_stderr": 0.014935916833078716 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077304, + "acc_norm": 0.5820543093270366, + "acc_norm_stderr": 0.01695729200527971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/yi-ko-SFT-LoRA-play-re-noprompt", + "model_sha": "40ceba7ecae5a639e5facb03c229a0ac29869d9f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/Xwin-LM/Xwin-LM-7B-V0.2/result_2023-11-16 02:03:09.json b/Xwin-LM/Xwin-LM-7B-V0.2/result_2023-11-16 02:03:09.json new file mode 100644 index 0000000000000000000000000000000000000000..271b236acde2bd651c60e6ae6a2d065abd092924 --- /dev/null +++ b/Xwin-LM/Xwin-LM-7B-V0.2/result_2023-11-16 02:03:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29266211604095566, + "acc_stderr": 0.013295916103619403, + "acc_norm": 0.33276450511945393, + "acc_norm_stderr": 0.013769863046192305 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35321649073889666, + "acc_stderr": 0.004769924131304646, + "acc_norm": 0.4219279028082055, + "acc_norm_stderr": 0.004928578106026368 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4112388250319285, + "acc_stderr": 0.017595971908056573, + "acc_norm": 0.4112388250319285, + "acc_norm_stderr": 0.017595971908056573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.030017554471880557, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880557 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4115755627009646, + "acc_stderr": 0.027950481494401255, + "acc_norm": 0.4115755627009646, + "acc_norm_stderr": 0.027950481494401255 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.03446897738659333, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.03446897738659333 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.03104194130405927, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.03104194130405927 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28717948717948716, + 
"acc_stderr": 0.02293992541853061, + "acc_norm": 0.28717948717948716, + "acc_norm_stderr": 0.02293992541853061 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3741935483870968, + "acc_stderr": 0.027528904299845777, + "acc_norm": 0.3741935483870968, + "acc_norm_stderr": 0.027528904299845777 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5427350427350427, + "acc_stderr": 0.03263622596380688, + "acc_norm": 0.5427350427350427, + "acc_norm_stderr": 0.03263622596380688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3433962264150943, + "acc_stderr": 0.02922452646912479, + "acc_norm": 0.3433962264150943, + "acc_norm_stderr": 0.02922452646912479 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247077, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247077 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.02345603738398202, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.02345603738398202 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.026362437574546538, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.026362437574546538 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 
0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3577981651376147, + "acc_stderr": 0.020552060784827818, + "acc_norm": 0.3577981651376147, + "acc_norm_stderr": 0.020552060784827818 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.019139943748487036, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.019139943748487036 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631157, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631157 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21323529411764705, + "acc_stderr": 0.024880971512294275, + "acc_norm": 0.21323529411764705, + "acc_norm_stderr": 0.024880971512294275 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301843, + "acc_norm": 0.35864978902953587, + 
"acc_norm_stderr": 0.031219569445301843 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2835723598435463, + "acc_stderr": 0.011511900775968312, + "acc_norm": 0.2835723598435463, + "acc_norm_stderr": 0.011511900775968312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.032566854844603886, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.032566854844603886 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4620630521284231, + "mc2_stderr": 0.016151515792492546 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2632821723730815, + "acc_stderr": 0.015141752199573208, + "acc_norm": 0.31286894923258557, + "acc_norm_stderr": 0.015941010118302658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 
1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Xwin-LM/Xwin-LM-7B-V0.2", + "model_sha": "6e401a3d621f91f751d4dc97be1d6289325a8306", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Yhyu13/LMCocktail-10.7B-v1/result_2023-12-23 15:58:05.json b/Yhyu13/LMCocktail-10.7B-v1/result_2023-12-23 15:58:05.json new file mode 100644 index 0000000000000000000000000000000000000000..ed152bd69a6e1482363a3c5f224f3215b974490d --- /dev/null +++ b/Yhyu13/LMCocktail-10.7B-v1/result_2023-12-23 15:58:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3771331058020478, + "acc_stderr": 0.014163366896192603, + "acc_norm": 0.4709897610921502, + "acc_norm_stderr": 0.014586776355294312 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39713204540928104, + "acc_stderr": 0.00488303775891996, + "acc_norm": 0.5318661621190998, + "acc_norm_stderr": 0.004979637330230311 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5900383141762452, + "acc_stderr": 0.01758767231233604, + "acc_norm": 0.5900383141762452, + "acc_norm_stderr": 0.01758767231233604 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5594855305466238, + "acc_stderr": 0.02819640057419742, + "acc_norm": 0.5594855305466238, + "acc_norm_stderr": 0.02819640057419742 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.03332299921070645, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.03332299921070645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.041641887201693775, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.041641887201693775 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.025334667080954897, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.025334667080954897 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389177, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389177 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066485, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364763, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364763 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137595, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137595 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.5780346820809249, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.027586006221607708, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.027586006221607708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6787564766839378, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.6787564766839378, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6220183486238532, + "acc_stderr": 0.02078918706672811, + "acc_norm": 0.6220183486238532, + "acc_norm_stderr": 0.02078918706672811 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.028568699752225868, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.028568699752225868 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.47875816993464054, + "acc_stderr": 0.020209572388600244, + "acc_norm": 0.47875816993464054, + "acc_norm_stderr": 0.020209572388600244 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.03385177976044809, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.03385177976044809 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.311731843575419, + "acc_stderr": 0.015491756531894638, + "acc_norm": 0.311731843575419, + "acc_norm_stderr": 0.015491756531894638 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.48161764705882354, + "acc_stderr": 0.030352303395351964, + 
"acc_norm": 0.48161764705882354, + "acc_norm_stderr": 0.030352303395351964 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610798, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610798 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38722294654498046, + "acc_stderr": 0.012441155326854931, + "acc_norm": 0.38722294654498046, + "acc_norm_stderr": 0.012441155326854931 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.034542365853806094, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.034542365853806094 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380027, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380027 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3635250917992656, + "mc1_stderr": 0.016838862883965817, + "mc2": 0.5248572009293102, + "mc2_stderr": 0.016409147736035586 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48642266824085006, + "acc_stderr": 0.01718401506040145, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.017185069732676538 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Yhyu13/LMCocktail-10.7B-v1", + "model_sha": "79ec3a42118f0715666b86bacab2688b62e1433b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/amphora/olaf-l.0.1/result_2023-10-21 02:17:55.json b/amphora/olaf-l.0.1/result_2023-10-21 02:17:55.json new file mode 100644 index 0000000000000000000000000000000000000000..4ac3e7afaf72bd57f989b06ec2b8293784808703 --- /dev/null +++ b/amphora/olaf-l.0.1/result_2023-10-21 02:17:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40784982935153585, + "acc_stderr": 0.01436109728844971, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007105 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40728938458474406, + "acc_stderr": 0.00490325426417762, + "acc_norm": 0.5451105357498506, + "acc_norm_stderr": 0.004969431900874312 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.017862091778507852, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.017862091778507852 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 
0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.02518914989476419, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.02518914989476419 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138653, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.02397386199899208, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.02397386199899208 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 
0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.03602573571288441, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.03602573571288441 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.01911721391149515, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.01911721391149515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.02847350127296377, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.02847350127296377 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 
0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254174, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254174 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823063, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823063 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875833, + "mc2": 0.4439993647512429, + "mc2_stderr": 0.014990045797851265 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4167650531286895, + "acc_stderr": 0.01695048914610883, + "acc_norm": 0.4817001180637544, + "acc_norm_stderr": 0.017178836639177738 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "amphora/olaf-l.0.1", + "model_sha": "1fe9598f2ec7fe35ce77e773ef35b97b893b11d0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/amphora/polyglot-5.8B-CoT-e1/result_2023-09-28 03:35:31.json b/amphora/polyglot-5.8B-CoT-e1/result_2023-09-28 03:35:31.json new file mode 100644 index 0000000000000000000000000000000000000000..38c73748db11cf781585bcda08eba4516653a9ef --- /dev/null +++ b/amphora/polyglot-5.8B-CoT-e1/result_2023-09-28 03:35:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29266211604095566, + "acc_stderr": 0.013295916103619413, + "acc_norm": 0.31399317406143346, + "acc_norm_stderr": 0.013562691224726291 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37004580760804623, + "acc_stderr": 0.00481829899101255, + "acc_norm": 0.47470623381796456, + "acc_norm_stderr": 0.004983392650570958 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20434227330779056, + "acc_stderr": 0.014419123980931906, + "acc_norm": 0.20434227330779056, + "acc_norm_stderr": 0.014419123980931906 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.22127659574468084, + "acc_stderr": 0.027136349602424063, + "acc_norm": 0.22127659574468084, + "acc_norm_stderr": 0.027136349602424063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.22289156626506024, + "acc_stderr": 0.03240004825594688, + "acc_norm": 0.22289156626506024, + "acc_norm_stderr": 0.03240004825594688 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 
0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11659192825112108, + "acc_stderr": 0.021539639816244464, + "acc_norm": 0.11659192825112108, + "acc_norm_stderr": 0.021539639816244464 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.03383201223244441, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.03383201223244441 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.18620689655172415, + "acc_stderr": 0.03243946159004616, + "acc_norm": 0.18620689655172415, + "acc_norm_stderr": 0.03243946159004616 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207763, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207763 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02934457250063435, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02934457250063435 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602364, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602364 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.0266620105785671, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.0266620105785671 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.02749566368372406, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.02749566368372406 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721376, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721376 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073835, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073835 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 
0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02256989707491842, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26878612716763006, + "acc_stderr": 0.023868003262500114, + "acc_norm": 0.26878612716763006, + "acc_norm_stderr": 0.023868003262500114 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.03351953879521271, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.03351953879521271 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.02346842983245115, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.02346842983245115 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3431192660550459, + "acc_stderr": 0.02035477773608604, + "acc_norm": 0.3431192660550459, + "acc_norm_stderr": 0.02035477773608604 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.17355371900826447, + "acc_stderr": 0.03457272836917669, + "acc_norm": 0.17355371900826447, + "acc_norm_stderr": 0.03457272836917669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.0166848209291486, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.0166848209291486 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.20921985815602837, + "acc_stderr": 0.02426476943998849, + "acc_norm": 0.20921985815602837, + "acc_norm_stderr": 0.02426476943998849 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347018, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347018 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403325, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403325 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.03113088039623593, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.03113088039623593 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25316455696202533, + "acc_stderr": 0.0283046579430353, + "acc_norm": 0.25316455696202533, + "acc_norm_stderr": 0.0283046579430353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.011005971399927235, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.011005971399927235 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752332, + "mc2": 0.39158327266747156, + "mc2_stderr": 0.014622481693781006 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2632821723730815, + "acc_stderr": 0.015141752199573207, + "acc_norm": 0.3837072018890201, + "acc_norm_stderr": 0.01671892463723183 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "amphora/polyglot-5.8B-CoT-e1", + "model_sha": "e8f4cb1d884cf4d67e3e8afc0aab09c62a0d68c6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/amphora/small-instruct/result_2023-10-09 19:17:00.json b/amphora/small-instruct/result_2023-10-09 19:17:00.json new file mode 100644 index 0000000000000000000000000000000000000000..7ee1aabcfaac0409d65e78eda40b2b088653c9cd --- /dev/null +++ b/amphora/small-instruct/result_2023-10-09 19:17:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2773037542662116, + "acc_stderr": 0.013082095839059374, + "acc_norm": 0.32764505119453924, + "acc_norm_stderr": 0.013715847940719346 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34863572993427605, + "acc_stderr": 0.00475564501626385, + "acc_norm": 0.4313881696873133, + "acc_norm_stderr": 0.004942578520987342 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.03760178006026621, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.03760178006026621 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.01574549716904906, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.01574549716904906 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + 
"acc": 0.2851063829787234, + "acc_stderr": 0.029513196625539355, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.029513196625539355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.02608270069539966, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.02608270069539966 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21973094170403587, + "acc_stderr": 0.0277901770643836, + "acc_norm": 0.21973094170403587, + "acc_norm_stderr": 0.0277901770643836 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124505, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124505 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.02851025151234191, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.02851025151234191 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2, + "acc_stderr": 0.020280805062535722, + "acc_norm": 0.2, + "acc_norm_stderr": 0.020280805062535722 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2019704433497537, + "acc_stderr": 0.028247350122180267, + "acc_norm": 0.2019704433497537, + "acc_norm_stderr": 0.028247350122180267 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.02518900666021238, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.02518900666021238 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.27350427350427353, + "acc_stderr": 0.029202540153431183, + "acc_norm": 0.27350427350427353, + "acc_norm_stderr": 0.029202540153431183 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.026480357179895712, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.026480357179895712 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878285, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878285 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 
0.026719240783712156, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712156 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.031157150869355575, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.031157150869355575 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525214, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525214 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508283, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508283 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.02474862449053737, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.02474862449053737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.03119584087770029, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.03119584087770029 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.018272575810231867, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.018272575810231867 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1349206349206349, + "acc_stderr": 0.030557101589417515, + "acc_norm": 0.1349206349206349, + "acc_norm_stderr": 0.030557101589417515 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.024404394928087866, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.024404394928087866 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.03611780560284898, + "acc_norm": 0.26973684210526316, + 
"acc_norm_stderr": 0.03611780560284898 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.01818521895431809, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.01818521895431809 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729903, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729903 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697623, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697623 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.22685185185185186, + "acc_stderr": 0.028561650102422273, + "acc_norm": 0.22685185185185186, + "acc_norm_stderr": 0.028561650102422273 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098426, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098426 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1875, + "acc_stderr": 0.023709788253811766, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.023709788253811766 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24315514993481094, + "acc_stderr": 0.010956556654417353, + "acc_norm": 0.24315514993481094, + "acc_norm_stderr": 0.010956556654417353 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570342, + "mc2": 0.415216441138711, + "mc2_stderr": 0.015096025074072256 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2585596221959858, + "acc_stderr": 0.01505335443896398, + "acc_norm": 0.3577331759149941, + "acc_norm_stderr": 0.01647980893574998 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "amphora/small-instruct", + "model_sha": "f88e14dc4b3b2b4f00261e77458497fac4f7a600", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/asapppppp/Llama2-ko-DPO-13B_lora_finetuning/result_2023-11-14 00:14:33.json b/asapppppp/Llama2-ko-DPO-13B_lora_finetuning/result_2023-11-14 00:14:33.json new file mode 100644 index 0000000000000000000000000000000000000000..1ffce89369745a33b64ba98798a2091ac04b9b59 --- /dev/null +++ b/asapppppp/Llama2-ko-DPO-13B_lora_finetuning/result_2023-11-14 00:14:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.22696245733788395, + "acc_stderr": 0.01224049153613286, + "acc_norm": 0.22696245733788395, + "acc_norm_stderr": 0.01224049153613286 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + 
"acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 
0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 
0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 1.0, + "mc1_stderr": 0.0, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252247, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.014846044968252247 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "asapppppp/Llama2-ko-DPO-13B_lora_finetuning", + "model_sha": "34a143bbc3acf9826e32df75f38f0854cd8c6970", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/asapppppp/kullm-polyglot-12.8b-v2_lora_finetuning/result_2023-11-09 05:59:46.json b/asapppppp/kullm-polyglot-12.8b-v2_lora_finetuning/result_2023-11-09 05:59:46.json new file mode 100644 index 0000000000000000000000000000000000000000..58927976d881e988965c5ddf389a08f3a51e6919 --- /dev/null +++ b/asapppppp/kullm-polyglot-12.8b-v2_lora_finetuning/result_2023-11-09 05:59:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.189419795221843, + "acc_stderr": 0.011450705115910767, + "acc_norm": 0.23720136518771331, + "acc_norm_stderr": 0.012430399829260847 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25423222465644296, + "acc_stderr": 0.004345388614520031, + "acc_norm": 0.2634933280223063, + "acc_norm_stderr": 
0.004396273173717444 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02860595370200424, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803627, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803627 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612378984, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612378984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.03058759135160425, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.03058759135160425 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + 
"mc1": 0.21909424724602203, + "mc1_stderr": 0.014480038578757445, + "mc2": 0.4671081145098644, + "mc2_stderr": 0.01682496498545748 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.0743801652892562, + "acc_stderr": 0.009021104510906089, + "acc_norm": 0.23258559622195984, + "acc_norm_stderr": 0.01452516918241648 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "asapppppp/kullm-polyglot-12.8b-v2_lora_finetuning", + "model_sha": "1902b727d43126031c1d5a8a28727e6427db578c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/asapppppp/polyglot_12.8B_lora_finetuning/result_2023-11-06 06:06:47.json b/asapppppp/polyglot_12.8B_lora_finetuning/result_2023-11-06 06:06:47.json new file mode 100644 index 
0000000000000000000000000000000000000000..d8545173e16ce6bf5c2e39ba8a349d54b7dcd08f --- /dev/null +++ b/asapppppp/polyglot_12.8B_lora_finetuning/result_2023-11-06 06:06:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20563139931740615, + "acc_stderr": 0.011810745260742574, + "acc_norm": 0.23378839590443687, + "acc_norm_stderr": 0.012368225378507135 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25632344154550885, + "acc_stderr": 0.004357101984278613, + "acc_norm": 0.2621987651862179, + "acc_norm_stderr": 0.004389312748012148 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21328224776500637, + "acc_stderr": 0.014648172749593515, + "acc_norm": 0.21328224776500637, + "acc_norm_stderr": 0.014648172749593515 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614865, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614865 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039783, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039783 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18674698795180722, + "acc_stderr": 0.030338749144500576, + "acc_norm": 0.18674698795180722, + "acc_norm_stderr": 0.030338749144500576 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.14349775784753363, + "acc_stderr": 0.02352937126961819, + "acc_norm": 0.14349775784753363, + "acc_norm_stderr": 0.02352937126961819 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756775, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756775 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602364, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602364 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094631, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094631 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.026522709674667768, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.026522709674667768 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20512820512820512, + "acc_stderr": 0.026453508054040332, + "acc_norm": 0.20512820512820512, + "acc_norm_stderr": 0.026453508054040332 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.035676037996391706, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.035676037996391706 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.022894082489925992, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.022894082489925992 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + 
"acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3471502590673575, + "acc_stderr": 0.03435696168361355, + "acc_norm": 0.3471502590673575, + "acc_norm_stderr": 0.03435696168361355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281335, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281335 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3431192660550459, + "acc_stderr": 0.020354777736086037, + "acc_norm": 0.3431192660550459, + "acc_norm_stderr": 0.020354777736086037 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.025829163272757482, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.025829163272757482 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1322314049586777, + "acc_stderr": 0.030922788320445805, + "acc_norm": 0.1322314049586777, + "acc_norm_stderr": 0.030922788320445805 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.016819028375736393, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736393 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872405, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966342, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966342 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174906, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.027479744550808507, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.027479744550808507 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.23468057366362452, + "acc_stderr": 0.010824026872449353, + "acc_norm": 0.23468057366362452, + "acc_norm_stderr": 0.010824026872449353 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.03096451792692339, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.03096451792692339 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603489, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603489 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22031823745410037, + "mc1_stderr": 0.014509045171487288, + "mc2": 0.4700873980893058, + "mc2_stderr": 0.016808897693551126 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.0743801652892562, + "acc_stderr": 0.00902110451090609, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.014676495332267255 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "asapppppp/polyglot_12.8B_lora_finetuning", + "model_sha": "c47154b0514ec6e194274ed6e512acb20aededdf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/KoAlpaca-KoRWKV-1.5B/result_2023-10-31 18:36:44.json b/beomi/KoAlpaca-KoRWKV-1.5B/result_2023-10-31 18:36:44.json new file mode 100644 index 0000000000000000000000000000000000000000..f2efb421117eeb48ccf558cea1c1e0489f9f9690 --- /dev/null +++ b/beomi/KoAlpaca-KoRWKV-1.5B/result_2023-10-31 18:36:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19965870307167236, + "acc_stderr": 0.011681625756888683, + "acc_norm": 0.26023890784982934, + "acc_norm_stderr": 0.012821930225112554 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2802230631348337, + "acc_stderr": 0.0044819026375056545, + "acc_norm": 0.30850428201553476, + "acc_norm_stderr": 0.004609320024893897 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2388250319284802, + "acc_stderr": 0.015246803197398696, + "acc_norm": 0.2388250319284802, + "acc_norm_stderr": 0.015246803197398696 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, 
+ "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1477832512315271, + "acc_stderr": 0.024969621333521274, + "acc_norm": 0.1477832512315271, + "acc_norm_stderr": 0.024969621333521274 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473835, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473835 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2275132275132275, + "acc_stderr": 0.021591269407823778, + "acc_norm": 0.2275132275132275, + "acc_norm_stderr": 0.021591269407823778 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 
0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1944954128440367, + "acc_stderr": 0.016970289090458043, + "acc_norm": 0.1944954128440367, + "acc_norm_stderr": 0.016970289090458043 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.024051029739912258, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.024051029739912258 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1875, + "acc_stderr": 0.023709788253811766, + "acc_norm": 0.1875, + "acc_norm_stderr": 
0.023709788253811766 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546195, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546195 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.01100597139992723, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.01100597139992723 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834564, + "mc2": 0.4765435436005545, + "mc2_stderr": 0.016302547246046926 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2809917355371901, + "acc_stderr": 0.015453559655458277, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/KoAlpaca-KoRWKV-1.5B", + "model_sha": "d463395c698beef06743bfa019d27aa185f8a3af", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/KoAlpaca-KoRWKV-6B/result_2023-10-31 18:35:44.json b/beomi/KoAlpaca-KoRWKV-6B/result_2023-10-31 18:35:44.json new file mode 100644 index 0000000000000000000000000000000000000000..68405dba8691d02da58fdaa3f6afa4881445b1c9 --- /dev/null +++ b/beomi/KoAlpaca-KoRWKV-6B/result_2023-10-31 18:35:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2764505119453925, + "acc_stderr": 0.013069662474252428, + "acc_norm": 0.3191126279863481, + "acc_norm_stderr": 0.013621696119173304 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3461461860187214, + "acc_stderr": 0.00474768200349145, + "acc_norm": 0.4402509460266879, + "acc_norm_stderr": 0.004954026775425764 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.01598281477469563, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.01598281477469563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.03106939026078942, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.03106939026078942 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818777, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818777 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.0384487613978527, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.0384487613978527 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.037800192304380156, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.037800192304380156 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993179, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993179 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694433, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694433 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.024580028921481006, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.024580028921481006 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.25213675213675213, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.25213675213675213, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899105, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.17218543046357615, + "acc_stderr": 0.030826136961962382, + "acc_norm": 0.17218543046357615, + "acc_norm_stderr": 0.030826136961962382 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.02218203720294837, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.02218203720294837 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { 
+ "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2861271676300578, + "acc_stderr": 0.024332146779134128, + "acc_norm": 0.2861271676300578, + "acc_norm_stderr": 0.024332146779134128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2993827160493827, + "acc_stderr": 0.02548311560119547, + "acc_norm": 0.2993827160493827, + "acc_norm_stderr": 0.02548311560119547 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583638, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583638 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22385321100917432, + "acc_stderr": 0.017871217767790215, + "acc_norm": 0.22385321100917432, + "acc_norm_stderr": 0.017871217767790215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591203, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591203 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.017917974069594726, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.017917974069594726 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266733, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266733 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 
0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2711864406779661, + "acc_stderr": 0.011354581451622981, + "acc_norm": 0.2711864406779661, + "acc_norm_stderr": 0.011354581451622981 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570338, + "mc2": 0.40076474326126255, + "mc2_stderr": 0.014949577614485286 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4132231404958678, + "acc_stderr": 0.01692948023449523, + "acc_norm": 0.5478158205430933, + "acc_norm_stderr": 0.017111567130916796 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/KoAlpaca-KoRWKV-6B", + "model_sha": "427ee72c4350f26de1b287a0c07b842e7d168dbc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/KoAlpaca-Polyglot-12.8B/result_2023-09-26 09:57:09.json b/beomi/KoAlpaca-Polyglot-12.8B/result_2023-09-26 09:57:09.json new file mode 100644 index 0000000000000000000000000000000000000000..55ba56b7b40b3c6a06a5231cd5aada8f4526cdb2 --- /dev/null +++ b/beomi/KoAlpaca-Polyglot-12.8B/result_2023-09-26 09:57:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31569965870307165, + "acc_stderr": 0.013582571095815291, + "acc_norm": 0.3438566552901024, + "acc_norm_stderr": 0.013880644570156208 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3873730332603067, + "acc_stderr": 0.004861544478451863, + "acc_norm": 0.4980083648675563, + "acc_norm_stderr": 0.004989741826250387 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26947637292464877, + "acc_stderr": 0.01586624307321505, + "acc_norm": 0.26947637292464877, + "acc_norm_stderr": 0.01586624307321505 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614867, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614867 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628827, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628827 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.033293941190735296, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.033293941190735296 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.02608270069539966, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.02608270069539966 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 
0.026936111912802277, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.026936111912802277 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365897, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365897 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.21379310344827587, + "acc_stderr": 0.03416520447747549, + "acc_norm": 0.21379310344827587, + "acc_norm_stderr": 0.03416520447747549 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.036186648199362445, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.036186648199362445 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882374, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882374 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2, + "acc_stderr": 0.020280805062535722, + "acc_norm": 0.2, + "acc_norm_stderr": 0.020280805062535722 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.03893542518824847, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.03893542518824847 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678243, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678243 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.02458002892148101, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.02458002892148101 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3034188034188034, + "acc_stderr": 0.030118210106942645, + "acc_norm": 0.3034188034188034, + "acc_norm_stderr": 0.030118210106942645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.02461829819586651, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02461829819586651 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.03014777593540922, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.03014777593540922 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + 
"acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400168, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.037161774375660164, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.037161774375660164 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26878612716763006, + "acc_stderr": 0.02386800326250011, + "acc_norm": 0.26878612716763006, + "acc_norm_stderr": 0.02386800326250011 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.02517104191530968, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.02517104191530968 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700286, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700286 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.01822407811729908, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.01822407811729908 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.034550710191021496, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.034550710191021496 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.0249541843248799, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.0249541843248799 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810535, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810535 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.017883188134667192, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.017883188134667192 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093936, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093936 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220513, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220513 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.02315746830855938, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.02315746830855938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.029818024749753102, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.029818024749753102 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25684485006518903, + "acc_stderr": 0.011158455853098862, + "acc_norm": 0.25684485006518903, + "acc_norm_stderr": 0.011158455853098862 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.0340150671524904, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.0340150671524904 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.4196185756093357, + "mc2_stderr": 0.01602551288494906 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2987012987012987, + "acc_stderr": 0.015735657391438295, + "acc_norm": 0.3482880755608028, + "acc_norm_stderr": 0.016379926739148037 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/KoAlpaca-Polyglot-12.8B", + "model_sha": "5f225e9c5ae6c7238fc2316da0b8a9922019674d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/KoAlpaca-Polyglot-5.8B/result_2023-09-26 09:56:49.json b/beomi/KoAlpaca-Polyglot-5.8B/result_2023-09-26 09:56:49.json new file mode 100644 index 0000000000000000000000000000000000000000..5c613b1f0c0a2ca6366a1c65368f7de0e3252cc8 --- /dev/null +++ b/beomi/KoAlpaca-Polyglot-5.8B/result_2023-09-26 09:56:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2593856655290102, + "acc_stderr": 0.012808273573927094, + "acc_norm": 0.3037542662116041, + "acc_norm_stderr": 0.01343890918477876 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3390758812985461, + "acc_stderr": 0.004724281487819373, + "acc_norm": 0.4146584345747859, + "acc_norm_stderr": 0.004916561213591286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.03401052620104088, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.03401052620104088 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23243933588761176, + "acc_stderr": 0.015104550008905699, + "acc_norm": 0.23243933588761176, + "acc_norm_stderr": 0.015104550008905699 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23829787234042554, + "acc_stderr": 0.027851252973889788, + "acc_norm": 0.23829787234042554, + "acc_norm_stderr": 0.027851252973889788 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.034106466140718564, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.034106466140718564 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.025218040373410622, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.025218040373410622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2556053811659193, + "acc_stderr": 0.029275891003969927, + "acc_norm": 0.2556053811659193, + "acc_norm_stderr": 0.029275891003969927 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22137404580152673, + "acc_stderr": 0.0364129708131373, + "acc_norm": 0.22137404580152673, + "acc_norm_stderr": 0.0364129708131373 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.13725490196078433, + "acc_stderr": 0.034240846698915216, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.034240846698915216 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.030066761582977934, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.030066761582977934 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28974358974358977, + "acc_stderr": 0.02300062824368796, + "acc_norm": 0.28974358974358977, + "acc_norm_stderr": 0.02300062824368796 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114475, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114475 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.02637756702864586, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.02637756702864586 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.028120966503914418, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.028120966503914418 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21132075471698114, + "acc_stderr": 0.025125766484827845, + "acc_norm": 0.21132075471698114, + "acc_norm_stderr": 0.025125766484827845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.043091187099464585, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.043091187099464585 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.029929415408348384, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.029929415408348384 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.03456425745086998, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.03456425745086998 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031708, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031708 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.02344582627654555, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.02344582627654555 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02492200116888632, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02492200116888632 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089116, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089116 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21834862385321102, + "acc_stderr": 0.01771260052872273, + "acc_norm": 0.21834862385321102, + "acc_norm_stderr": 0.01771260052872273 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.025058503316958157, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.025058503316958157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.23039215686274508, + "acc_stderr": 0.017035229258034038, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.017035229258034038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537773, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537773 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.03485946096475741, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.03485946096475741 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.03038805130167812, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.03038805130167812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.0141022236231526, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.0141022236231526 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.02643132987078953, + "acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.02643132987078953 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073132, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073132 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771316, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923413, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923413 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731613, + "mc2": 0.40043350315231013, + "mc2_stderr": 0.01604778937263507 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2892561983471074, + "acc_stderr": 0.015588800386053555, + "acc_norm": 0.31759149940968123, + "acc_norm_stderr": 0.016005581876229306 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/KoAlpaca-Polyglot-5.8B", + "model_sha": "cb1597cbaf4a98e52e6b767381a80893e4818477", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/KoRWKV-1.5B/result_2023-10-31 18:37:02.json b/beomi/KoRWKV-1.5B/result_2023-10-31 18:37:02.json new file mode 100644 index 0000000000000000000000000000000000000000..9af029677be357ca14786b39e627a3507e5eb2ed --- /dev/null +++ b/beomi/KoRWKV-1.5B/result_2023-10-31 18:37:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2150170648464164, + "acc_stderr": 0.012005717634133616, + "acc_norm": 0.2687713310580205, + "acc_norm_stderr": 0.012955065963710686 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3241386178052181, + "acc_stderr": 0.004670955399641123, + "acc_norm": 0.3950408285202151, + "acc_norm_stderr": 0.004878603699686037 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824564, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824564 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.01598281477469563, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.01598281477469563 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386705, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386705 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.032361983509282745, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.032361983509282745 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494564, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494564 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361252, + "acc_norm": 0.23949579831932774, + "acc_norm_stderr": 0.027722065493361252 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.021763733684173923, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.021763733684173923 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.03826076324884863, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.03826076324884863 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23225806451612904, + "acc_stderr": 0.024022256130308235, + "acc_norm": 0.23225806451612904, + "acc_norm_stderr": 0.024022256130308235 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20085470085470086, + "acc_stderr": 0.02624677294689048, + "acc_norm": 0.20085470085470086, + "acc_norm_stderr": 0.02624677294689048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.2641509433962264, + "acc_stderr": 0.027134291628741695, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.027134291628741695 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.032424147574830975, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.032424147574830975 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240018, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826373, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826373 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.0227797190887334, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.0227797190887334 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3471502590673575, + "acc_stderr": 0.03435696168361355, + "acc_norm": 0.3471502590673575, + "acc_norm_stderr": 0.03435696168361355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27522935779816515, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.27522935779816515, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879341023, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.024170840879341023 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + 
"acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.016992723465466233, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.016992723465466233 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642973, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642973 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190735, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190735 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.033723432716530624, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.033723432716530624 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925312, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925312 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003472, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003472 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.030116426296540582, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.030116426296540582 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2503259452411995, + "acc_stderr": 0.011064151027165441, + "acc_norm": 0.2503259452411995, + "acc_norm_stderr": 0.011064151027165441 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693247, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693247 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603489, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603489 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.01489627744104187, + "mc2": 0.40377476002097423, + "mc2_stderr": 0.01506211934008226 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3955135773317591, + "acc_stderr": 0.016810815902206042, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.017107618859549346 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/KoRWKV-1.5B", + "model_sha": "e2e327ae9075c634e8b127f262412d670038621e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/KoRWKV-6B/result_2023-10-31 18:36:08.json b/beomi/KoRWKV-6B/result_2023-10-31 18:36:08.json new file mode 100644 index 0000000000000000000000000000000000000000..d437c8a2407d3850b01ed125f6851617b49d6f15 --- /dev/null +++ b/beomi/KoRWKV-6B/result_2023-10-31 18:36:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24146757679180889, + "acc_stderr": 0.01250656483973943, + "acc_norm": 0.28668941979522183, + "acc_norm_stderr": 0.013214986329274779 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3467436765584545, + "acc_stderr": 0.004749606196363337, + "acc_norm": 0.4356701852220673, + "acc_norm_stderr": 0.004948310399746081 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.03401052620104089, + 
"acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.03401052620104089 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.039891398595317706, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.039891398595317706 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27458492975734355, + "acc_stderr": 0.01595982993308404, + "acc_norm": 0.27458492975734355, + "acc_norm_stderr": 0.01595982993308404 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.0261488180184245, + "acc_norm": 0.2, + "acc_norm_stderr": 0.0261488180184245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064537, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064537 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818774, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818774 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21524663677130046, + "acc_stderr": 0.027584066602208274, + "acc_norm": 0.21524663677130046, + "acc_norm_stderr": 0.027584066602208274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.0298575156733864, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.0298575156733864 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2846153846153846, + "acc_stderr": 0.022878322799706304, + "acc_norm": 0.2846153846153846, + "acc_norm_stderr": 0.022878322799706304 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2094017094017094, + "acc_stderr": 0.026655699653922737, + "acc_norm": 0.2094017094017094, + "acc_norm_stderr": 0.026655699653922737 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891356, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891356 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.15454545454545454, + "acc_stderr": 0.03462262571262667, + "acc_norm": 0.15454545454545454, + "acc_norm_stderr": 0.03462262571262667 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.030965903123573026, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.030965903123573026 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2275132275132275, + "acc_stderr": 0.021591269407823778, + "acc_norm": 0.2275132275132275, + "acc_norm_stderr": 0.021591269407823778 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.033961162058453336, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.033961162058453336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.022289638852617904, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.022289638852617904 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924034, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924034 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.02346842983245116, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.02346842983245116 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.31088082901554404, + "acc_stderr": 0.03340361906276587, + "acc_norm": 0.31088082901554404, + "acc_norm_stderr": 0.03340361906276587 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399421, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399421 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24770642201834864, + "acc_stderr": 0.018508143602547805, + "acc_norm": 0.24770642201834864, + "acc_norm_stderr": 0.018508143602547805 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.02495418432487991, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.02495418432487991 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302871, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302871 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533156, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533156 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142783, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.22685788787483702, + "acc_stderr": 0.010696348133569924, + "acc_norm": 0.22685788787483702, + "acc_norm_stderr": 0.010696348133569924 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.03256866661681102, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.03256866661681102 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22399020807833536, + "mc1_stderr": 0.014594964329474205, + "mc2": 0.38068228949426847, + "mc2_stderr": 0.014620809751439413 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.4037780401416765, + "acc_stderr": 0.016869031540298632, + "acc_norm": 0.5407319952774499, + "acc_norm_stderr": 0.017133218276537673 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/KoRWKV-6B", + "model_sha": "541600070459baf0f1be9560181d5ceb77794085", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/Yi-Ko-6B/result_2023-12-01 23:58:13.json b/beomi/Yi-Ko-6B/result_2023-12-01 23:58:13.json new file mode 100644 index 0000000000000000000000000000000000000000..4640f797e418c923b1323a9b5000d7dc3f61c5f3 --- /dev/null +++ b/beomi/Yi-Ko-6B/result_2023-12-01 23:58:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.3438566552901024, + "acc_stderr": 0.013880644570156211, + "acc_norm": 0.4104095563139932, + "acc_norm_stderr": 0.014374922192642664 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39822744473212507, + "acc_stderr": 0.004885323175701673, + "acc_norm": 0.5338577972515435, + "acc_norm_stderr": 0.004978328190775523 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633944, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633944 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736118, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736118 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 
0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5290322580645161, + "acc_stderr": 0.02839601640276099, + "acc_norm": 0.5290322580645161, + "acc_norm_stderr": 0.02839601640276099 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.029343114798094462, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.029343114798094462 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.03070948699255654, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.03070948699255654 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622842, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622842 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750187, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750187 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 0.012008129938540479, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540479 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522509, + "mc2": 0.4163606442574381, + "mc2_stderr": 0.014821458524779802 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5737898465171193, + "acc_stderr": 0.017002122609489252, + "acc_norm": 0.6162927981109799, + "acc_norm_stderr": 0.016718924637231826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/Yi-Ko-6B", + "model_sha": "edb86b47219f600f391da5821a07a22ab450b11b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of 
file diff --git a/beomi/kollama-13b/result_2023-09-26 17:41:30.json b/beomi/kollama-13b/result_2023-09-26 17:41:30.json new file mode 100644 index 0000000000000000000000000000000000000000..22962cd72290ddbaf9190d2001d0a2857b636fc0 --- /dev/null +++ b/beomi/kollama-13b/result_2023-09-26 17:41:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.18430034129692832, + "acc_stderr": 0.011330517933037432, + "acc_norm": 0.24061433447098976, + "acc_norm_stderr": 0.012491468532390559 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2724556861183031, + "acc_stderr": 0.004443131632679339, + "acc_norm": 0.2983469428400717, + "acc_norm_stderr": 0.004565974937793705 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.0340105262010409, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.0340105262010409 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.015671006009339582, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.015671006009339582 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188947, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188947 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.030216831011508773, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.030216831011508773 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.02619980880756189, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.02619980880756189 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834839, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834839 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2191358024691358, + "acc_stderr": 0.023016705640262185, + 
"acc_norm": 0.2191358024691358, + "acc_norm_stderr": 0.023016705640262185 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24036697247706423, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.24036697247706423, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604675, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604675 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290396, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.0348594609647574, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.0348594609647574 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.03119223072679566, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.03119223072679566 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, 
+ "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693257, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693257 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715021, + "mc2": 0.47018197225111685, + "mc2_stderr": 0.016150007373089376 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2101534828807556, + "acc_stderr": 0.014007301224897517, + "acc_norm": 0.3707201889020071, + "acc_norm_stderr": 0.016605801289212588 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/kollama-13b", + "model_sha": "d25ffb8c1a147e67c1bce0aca49a710395ce18ae", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/llama-2-ko-7b-emb-dev/result_2023-12-28 05:13:27.json b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-28 05:13:27.json new file mode 100644 index 0000000000000000000000000000000000000000..e1a7266a65a81bc8f5ef8f1e249bbe3a8033b3b6 --- /dev/null +++ b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-28 05:13:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2790102389078498, + "acc_stderr": 0.013106784883601355, + "acc_norm": 0.3455631399317406, + "acc_norm_stderr": 0.01389693846114568 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33808006373232424, + "acc_stderr": 0.004720891597174718, + "acc_norm": 0.45120493925512845, + "acc_norm_stderr": 0.0049659636472103195 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.31545338441890164, + "acc_stderr": 0.016617501738763394, + "acc_norm": 0.31545338441890164, + "acc_norm_stderr": 0.016617501738763394 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.026311858071854155, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.026311858071854155 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.04039314978724562, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.04039314978724562 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.038061426873099935, + "acc_norm": 0.296551724137931, + 
"acc_norm_stderr": 0.038061426873099935 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277723, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277723 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2358974358974359, + "acc_stderr": 0.021525965407408726, + "acc_norm": 0.2358974358974359, + "acc_norm_stderr": 0.021525965407408726 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854932, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22167487684729065, + "acc_stderr": 0.0292255758924896, + "acc_norm": 0.22167487684729065, + "acc_norm_stderr": 0.0292255758924896 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.025822106119415895, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.025822106119415895 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3247863247863248, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.3247863247863248, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.29056603773584905, + "acc_stderr": 0.02794321998933716, + "acc_norm": 0.29056603773584905, + "acc_norm_stderr": 0.02794321998933716 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03333333333333334, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03333333333333334 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173042, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173042 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708614, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708614 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.024547617794803828, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.024547617794803828 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765127, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765127 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27461139896373055, + "acc_stderr": 0.032210245080411516, + "acc_norm": 0.27461139896373055, + "acc_norm_stderr": 0.032210245080411516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25137614678899084, + "acc_stderr": 0.018599206360287415, + "acc_norm": 0.25137614678899084, + "acc_norm_stderr": 0.018599206360287415 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.03512207412302052, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.03512207412302052 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279035, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279035 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.030643607071677098, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.030643607071677098 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3022875816993464, + "acc_stderr": 0.018579232711113874, + "acc_norm": 0.3022875816993464, + "acc_norm_stderr": 0.018579232711113874 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340461014, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340461014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626978, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.029886910547626978 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1801470588235294, + "acc_stderr": 0.02334516361654488, + "acc_norm": 0.1801470588235294, + "acc_norm_stderr": 0.02334516361654488 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.030781549102026216, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.030781549102026216 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26401564537157757, + "acc_stderr": 0.011258435537723814, + "acc_norm": 0.26401564537157757, + "acc_norm_stderr": 0.011258435537723814 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967407, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967407 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055952, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055952 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": 0.42530376345187815, + "mc2_stderr": 0.015252754425393767 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.15584415584415584, + "acc_stderr": 0.012470141877923077, + "acc_norm": 0.3577331759149941, + "acc_norm_stderr": 0.016479808935749976 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/llama-2-ko-7b-emb-dev", + "model_sha": "f1ff977bd4ee3f0c2a3ee7dd1c4b7750e3a0766c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/llama-2-ko-7b-emb-dev/result_2023-12-29 06:46:01.json b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-29 06:46:01.json new file mode 100644 index 0000000000000000000000000000000000000000..97a79f16b02e2028b6b45d12d986418ae66a68f1 --- /dev/null +++ b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-29 06:46:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30204778156996587, + "acc_stderr": 0.013417519144716429, + "acc_norm": 0.378839590443686, + "acc_norm_stderr": 0.014175915490000324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35480979884485164, + "acc_stderr": 0.0047747781803451845, + "acc_norm": 0.47390957976498704, + "acc_norm_stderr": 0.0049829835924591935 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.033014059469872487, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.033014059469872487 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384493, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384493 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3269476372924649, + "acc_stderr": 0.01677490818013146, + "acc_norm": 0.3269476372924649, + "acc_norm_stderr": 0.01677490818013146 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3215434083601286, + "acc_stderr": 0.026527724079528872, + "acc_norm": 0.3215434083601286, + "acc_norm_stderr": 0.026527724079528872 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.03089861088247751, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.03089861088247751 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + 
"acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438013, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438013 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3, + "acc_stderr": 0.023234581088428494, + "acc_norm": 0.3, + "acc_norm_stderr": 0.023234581088428494 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.043300437496507416, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.043300437496507416 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.03295797566311271, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.03295797566311271 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.026522709674667768, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.026522709674667768 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3034188034188034, + "acc_stderr": 0.03011821010694266, + "acc_norm": 0.3034188034188034, + "acc_norm_stderr": 0.03011821010694266 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2830188679245283, + "acc_stderr": 0.0277242364927009, + "acc_norm": 0.2830188679245283, + "acc_norm_stderr": 0.0277242364927009 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910508, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3880597014925373, + "acc_stderr": 0.0344578996436275, + "acc_norm": 0.3880597014925373, + "acc_norm_stderr": 0.0344578996436275 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.023135287974325635, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.023135287974325635 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624576, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624576 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26878612716763006, + "acc_stderr": 0.023868003262500104, + "acc_norm": 0.26878612716763006, + "acc_norm_stderr": 0.023868003262500104 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.03487825168497892, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.03487825168497892 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.025773111169630453, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.025773111169630453 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26055045871559634, + "acc_stderr": 0.018819182034850068, + "acc_norm": 0.26055045871559634, + "acc_norm_stderr": 0.018819182034850068 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.026643278474508755, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.026643278474508755 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4132231404958678, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.4132231404958678, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612379002, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612379002 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902006, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828977, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828977 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.23575418994413408, + "acc_stderr": 0.014196375686290803, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290803 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2607561929595828, + "acc_stderr": 0.011213471559602334, + "acc_norm": 0.2607561929595828, + "acc_norm_stderr": 0.011213471559602334 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624337, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624337 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148123, + "mc2": 0.4175794689167079, + "mc2_stderr": 0.01513967881843377 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.13695395513577333, + "acc_stderr": 0.011820043946570876, + "acc_norm": 0.33530106257378983, + "acc_norm_stderr": 0.016230981232989817 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/llama-2-ko-7b-emb-dev", + "model_sha": "3796dc4797838aa3c3a9cd22a3d2b73b931fc684", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/llama-2-ko-7b-emb-dev/result_2023-12-30 09:33:08.json b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-30 09:33:08.json new file mode 100644 index 0000000000000000000000000000000000000000..d28333944cc424b8fdda9ba5be6bcba1fbd9a02e --- /dev/null +++ b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-30 09:33:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.013697432466693239, + "acc_norm": 0.40273037542662116, + "acc_norm_stderr": 0.014332236306790147 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3641704839673372, + "acc_stderr": 0.004802133511654224, + "acc_norm": 0.49083847839075884, + "acc_norm_stderr": 0.004988943721711207 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.03301405946987249, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.03301405946987249 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.30395913154533843, + "acc_stderr": 0.016448321686769043, + "acc_norm": 0.30395913154533843, + "acc_norm_stderr": 0.016448321686769043 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998484, + "acc_norm": 0.2829581993569132, 
+ "acc_norm_stderr": 0.02558306248998484 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727774, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727774 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341923, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.028510251512341923 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.02228214120420443, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.02228214120420443 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144446, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671742, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671742 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2830188679245283, + "acc_stderr": 0.0277242364927009, + "acc_norm": 0.2830188679245283, + "acc_norm_stderr": 0.0277242364927009 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.03479185572599661, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.03479185572599661 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 0.03220024104534204, + "acc_norm": 0.2935323383084577, + 
"acc_norm_stderr": 0.03220024104534204 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.03512385283705051, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.03512385283705051 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02492200116888633, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02492200116888633 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20207253886010362, + "acc_stderr": 0.02897908979429673, + "acc_norm": 0.20207253886010362, + "acc_norm_stderr": 0.02897908979429673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.02555316999182652, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.02555316999182652 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880585, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880585 + 
}, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467764, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467764 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791044, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.24632352941176472, + "acc_stderr": 0.02617343857052, + "acc_norm": 0.24632352941176472, + "acc_norm_stderr": 0.02617343857052 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.0265370453121453, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.0265370453121453 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.03027497488021898, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.03027497488021898 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.028867431449849313, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.028867431449849313 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283347, + "mc2": 0.430843038646161, + "mc2_stderr": 0.015222244438027463 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.15348288075560804, + "acc_stderr": 0.012392606565325119, + "acc_norm": 0.3435655253837072, + "acc_norm_stderr": 0.016327334806429145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 
1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/llama-2-ko-7b-emb-dev", + "model_sha": "3796dc4797838aa3c3a9cd22a3d2b73b931fc684", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/llama-2-ko-7b-emb-dev/result_2023-12-31 09:24:44.json b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-31 09:24:44.json new file mode 100644 index 0000000000000000000000000000000000000000..c2855517086636e8296c98b3834f18d4e2437c53 --- /dev/null +++ b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-31 09:24:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3387372013651877, + "acc_stderr": 0.013830568927974334, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303031 + }, + "harness|ko_hellaswag|10": { + "acc": 0.364070902210715, + "acc_stderr": 0.0048018528813297484, + "acc_norm": 0.49741087432782316, + "acc_norm_stderr": 0.004989714512282407 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.03158149539338733, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.03158149539338733 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3052362707535121, + "acc_stderr": 0.016467711947635123, + "acc_norm": 0.3052362707535121, + "acc_norm_stderr": 0.016467711947635123 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595852, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595852 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632924, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632924 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168264, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.038808483010823944, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.038808483010823944 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646826, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646826 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2815126050420168, + "acc_stderr": 0.02921354941437216, + "acc_norm": 0.2815126050420168, + "acc_norm_stderr": 0.02921354941437216 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.02228214120420443, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.02228214120420443 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.032406615658684086, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.032406615658684086 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.02573654274559453, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.02573654274559453 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891165, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891165 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493875, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493875 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + 
"acc_stderr": 0.02742001935094527, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094527 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.34328358208955223, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.34328358208955223, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.02394851290546836, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.02394851290546836 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.025171041915309684, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.025171041915309684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.029778663037752954, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.029778663037752954 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26605504587155965, + "acc_stderr": 0.018946022322225597, + "acc_norm": 0.26605504587155965, + "acc_norm_stderr": 0.018946022322225597 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 
0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25326797385620914, + "acc_stderr": 0.017593486895366828, + "acc_norm": 0.25326797385620914, + "acc_norm_stderr": 0.017593486895366828 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.02769691071309394, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.02769691071309394 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031218, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031218 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.02826388994378461, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.02826388994378461 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842544, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842544 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26401564537157757, + "acc_stderr": 0.011258435537723814, + "acc_norm": 0.26401564537157757, + "acc_norm_stderr": 0.011258435537723814 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299953, + "mc2": 0.4275383331125476, + "mc2_stderr": 0.01526305656191646 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.14403778040141677, + "acc_stderr": 0.012072030576668953, + "acc_norm": 0.3707201889020071, + "acc_norm_stderr": 0.016605801289212598 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/llama-2-ko-7b-emb-dev", + "model_sha": "d0e8d08d5f41082f3f48ec990edc2eb521ac2e73", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/llama-2-ko-7b/result_2023-09-26 12:00:48.json b/beomi/llama-2-ko-7b/result_2023-09-26 12:00:48.json new file mode 100644 index 0000000000000000000000000000000000000000..3d0d0cfd3631e091caff2c99dcb627c3e211f605 --- /dev/null +++ b/beomi/llama-2-ko-7b/result_2023-09-26 12:00:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349814 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38309101772555265, + "acc_stderr": 0.004851466623601449, + "acc_norm": 0.4958175662218682, + "acc_norm_stderr": 0.00498960683837107 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 
0.016905207420803554, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.016905207420803554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.030017554471880557, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880557 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378949, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378949 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.030066761582977924, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.030066761582977924 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2743589743589744, + "acc_stderr": 0.022622765767493197, + "acc_norm": 0.2743589743589744, + "acc_norm_stderr": 0.022622765767493197 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671746, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671746 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36752136752136755, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.36752136752136755, + 
"acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505416, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505416 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959912, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959912 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022596, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022596 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159614, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 
0.027582811415159614 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.018120224251484577, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484577 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936484, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936484 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301833, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.031219569445301833 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733096, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733096 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37061663539899015, + "mc2_stderr": 0.014735219813379136 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/llama-2-ko-7b", + "model_sha": "4e6e7eb86c9c98600cad6d7ae9942204302a48a1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/llama-2-koen-13b/result_2023-11-20 05:57:13.json b/beomi/llama-2-koen-13b/result_2023-11-20 05:57:13.json new file mode 100644 index 0000000000000000000000000000000000000000..a9c86fc049b22fc793c34042251fed151731fffb --- /dev/null +++ b/beomi/llama-2-koen-13b/result_2023-11-20 05:57:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38054607508532423, + "acc_stderr": 0.014188277712349814, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580123 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4133638717386975, + "acc_stderr": 0.004914305798575696, + "acc_norm": 0.5623381796454889, + "acc_norm_stderr": 
0.004950848456984544 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.017870847506081734, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.017870847506081734 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.025174048384000777, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.025174048384000777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.031426169937919246, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.031426169937919246 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + 
"acc_stderr": 0.02142140298254888, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486634, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.01952431674486634 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534792, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534792 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30182529335071706, + "acc_stderr": 0.01172435051810589, + "acc_norm": 0.30182529335071706, + "acc_norm_stderr": 0.01172435051810589 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156474, + "mc2": 0.409778749789472, + 
"mc2_stderr": 0.014729442757477942 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4592680047225502, + "acc_stderr": 0.01713321827653767, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.017115418225226872 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/llama-2-koen-13b", + "model_sha": "087f0a3d78be66478382bb9a3aad0a1594215e53", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/open-llama-2-ko-7b/result_2023-12-14 14:14:49.json b/beomi/open-llama-2-ko-7b/result_2023-12-14 14:14:49.json new file mode 100644 index 0000000000000000000000000000000000000000..03c5f84d51ba8197946d3c474920408701ca148a --- /dev/null +++ b/beomi/open-llama-2-ko-7b/result_2023-12-14 14:14:49.json @@ 
-0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635474, + "acc_norm": 0.40017064846416384, + "acc_norm_stderr": 0.014317197787809169 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38498307110137425, + "acc_stderr": 0.004855968578998728, + "acc_norm": 0.502688707428799, + "acc_norm_stderr": 0.004989709267191013 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038266, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.034678266857038266 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3218390804597701, + "acc_stderr": 0.0167063814150579, + "acc_norm": 0.3218390804597701, + "acc_norm_stderr": 0.0167063814150579 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768076, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768076 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534425, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.02540383297817961, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.02540383297817961 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365928, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365928 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863797, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863797 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.021606294494647727, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.021606294494647727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.046166311118017125, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.046166311118017125 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358608, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358608 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.29354838709677417, + "acc_stderr": 0.0259060870213193, + "acc_norm": 0.29354838709677417, + "acc_norm_stderr": 0.0259060870213193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30566037735849055, + "acc_stderr": 0.028353298073322666, + "acc_norm": 0.30566037735849055, + "acc_norm_stderr": 0.028353298073322666 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910508, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766118, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766118 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.031862098516411426, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.031862098516411426 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02256989707491841, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491841 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.02394851290546836, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.02394851290546836 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724146, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724146 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + 
"acc": 0.25906735751295334, + "acc_stderr": 0.03161877917935411, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.03161877917935411 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26605504587155965, + "acc_stderr": 0.018946022322225597, + "acc_norm": 0.26605504587155965, + "acc_norm_stderr": 0.018946022322225597 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.033176727875331574, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.033176727875331574 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.315359477124183, + "acc_stderr": 0.018798086284886883, + "acc_norm": 0.315359477124183, + "acc_norm_stderr": 0.018798086284886883 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.02541642838876748, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.02541642838876748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1948529411764706, + "acc_stderr": 0.02406059942348742, + "acc_norm": 0.1948529411764706, + "acc_norm_stderr": 0.02406059942348742 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.34285714285714286, + "acc_stderr": 0.03038726291954773, + "acc_norm": 0.34285714285714286, + "acc_norm_stderr": 0.03038726291954773 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.030274974880218974, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.030274974880218974 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23378212974296206, + "mc1_stderr": 0.014816195991931586, + "mc2": 0.38671616095132844, + "mc2_stderr": 0.014642090656734802 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30460448642266824, + "acc_stderr": 0.015823367273129385, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.016977101932601532 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/open-llama-2-ko-7b", + "model_sha": 
"84ae8774f8b586b3b84cb1b0b48860d3fec8745f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/ANHSY_0.1/result_2023-11-10 10:26:46.json b/blueapple8259/ANHSY_0.1/result_2023-11-10 10:26:46.json new file mode 100644 index 0000000000000000000000000000000000000000..31a2edaa2d8b076b087d4366f2ff44e71c8e4a5b --- /dev/null +++ b/blueapple8259/ANHSY_0.1/result_2023-11-10 10:26:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19965870307167236, + "acc_stderr": 0.01168162575688868, + "acc_norm": 0.2593856655290102, + "acc_norm_stderr": 0.012808273573927102 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28759211312487554, + "acc_stderr": 0.0045171484341805035, + "acc_norm": 0.32423819956184025, + "acc_norm_stderr": 0.004671328673217806 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.015671006009339565, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.015671006009339565 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02850485647051421, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02850485647051421 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2604501607717042, + "acc_stderr": 0.02492672322484555, + "acc_norm": 0.2604501607717042, + "acc_norm_stderr": 0.02492672322484555 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.028869778460267045, + "acc_norm": 0.20707070707070707, + "acc_norm_stderr": 0.028869778460267045 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774631, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774631 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.20168067226890757, + "acc_stderr": 0.02606431340630453, + "acc_norm": 0.20168067226890757, + "acc_norm_stderr": 0.02606431340630453 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02242127361292371, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02242127361292371 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15763546798029557, + "acc_stderr": 0.025639014131172404, + "acc_norm": 0.15763546798029557, + "acc_norm_stderr": 0.025639014131172404 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1967741935483871, + "acc_stderr": 0.02261640942074202, + "acc_norm": 0.1967741935483871, + "acc_norm_stderr": 0.02261640942074202 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.18803418803418803, + "acc_stderr": 0.02559819368665224, + "acc_norm": 0.18803418803418803, + "acc_norm_stderr": 0.02559819368665224 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724136, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724136 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21890547263681592, + "acc_stderr": 0.029239174636647, + "acc_norm": 0.21890547263681592, + "acc_norm_stderr": 0.029239174636647 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.02277971908873339, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.02277971908873339 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565317, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565317 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.038924311065187525, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.038924311065187525 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24036697247706423, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.24036697247706423, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011743, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.02355083135199509, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.02355083135199509 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.017322789207784326, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.017322789207784326 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432417, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432417 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925324, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925324 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03004261583271486, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03004261583271486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2163265306122449, + "acc_stderr": 
0.026358916334904017, + "acc_norm": 0.2163265306122449, + "acc_norm_stderr": 0.026358916334904017 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.43499689918333406, + "mc2_stderr": 0.015335243970671835 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33412042502951594, + "acc_stderr": 0.016216763304239688, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.017090852631668332 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/ANHSY_0.1", + "model_sha": "62bb441a62b634f0fb14e909bebfabae6506ed39", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/ANHSY_half_0.2/result_2023-11-12 15:17:33.json b/blueapple8259/ANHSY_half_0.2/result_2023-11-12 15:17:33.json new file mode 100644 index 0000000000000000000000000000000000000000..215c58eeab3dfe81f37a8cd2725cd15533358056 --- /dev/null +++ b/blueapple8259/ANHSY_half_0.2/result_2023-11-12 15:17:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20648464163822525, + "acc_stderr": 0.011828865619002316, + "acc_norm": 0.257679180887372, + "acc_norm_stderr": 0.012780770562768402 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2930691097390958, + "acc_stderr": 0.004542396269999217, + "acc_norm": 0.32971519617606054, + "acc_norm_stderr": 0.00469148881303216 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.18128654970760233, + "acc_stderr": 0.029547741687640027, + "acc_norm": 0.18128654970760233, + "acc_norm_stderr": 0.029547741687640027 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26436781609195403, + "acc_stderr": 0.01576998484069052, + "acc_norm": 0.26436781609195403, + "acc_norm_stderr": 0.01576998484069052 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.03329394119073528, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.03329394119073528 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188943, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229132, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229132 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.19696969696969696, + "acc_stderr": 0.028335609732463348, + "acc_norm": 0.19696969696969696, + 
"acc_norm_stderr": 0.028335609732463348 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.02971914287634285, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.02971914287634285 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.024243783994062167, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 0.024243783994062167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.02989611429173355, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.02989611429173355 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335137, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335137 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721377, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721377 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.030769444967296018, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.030769444967296018 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21676300578034682, + "acc_stderr": 0.022183477668412856, + "acc_norm": 0.21676300578034682, + "acc_norm_stderr": 0.022183477668412856 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615771, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24691358024691357, + "acc_stderr": 0.023993501709042117, + "acc_norm": 0.24691358024691357, + "acc_norm_stderr": 0.023993501709042117 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836183, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.03410780251836183 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.01827257581023187, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.01827257581023187 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.025457756696667874, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.025457756696667874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.16447368421052633, + "acc_stderr": 0.030167533468632726, + "acc_norm": 0.16447368421052633, + "acc_norm_stderr": 0.030167533468632726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.01740181671142765, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.01740181671142765 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902013, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902013 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.03562367850095391, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.03562367850095391 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.014333522059217889, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.014333522059217889 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866764, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24119947848761408, + "acc_stderr": 0.010926496102034947, + "acc_norm": 0.24119947848761408, + "acc_norm_stderr": 0.010926496102034947 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693254, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.01522589934082682, + "mc2": 0.4338664363865149, + "mc2_stderr": 0.0154180940348635 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33884297520661155, + "acc_stderr": 0.016272952997019124, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.017014038119297463 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/ANHSY_half_0.2", + "model_sha": "71a877f97ed8246d44a4fe81e7fbc9b5049e4dff", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/ANHSY_test/result_2023-11-11 03:28:51.json b/blueapple8259/ANHSY_test/result_2023-11-11 03:28:51.json new file mode 100644 index 0000000000000000000000000000000000000000..a9daa17be38789788b1154616b2add3788c7e778 --- /dev/null +++ b/blueapple8259/ANHSY_test/result_2023-11-11 03:28:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20136518771331058, + "acc_stderr": 0.011718927477444269, + "acc_norm": 0.25170648464163825, + "acc_norm_stderr": 0.012682496334042961 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2892850029874527, + "acc_stderr": 0.004525037849178839, + "acc_norm": 0.32822146982672773, + "acc_norm_stderr": 0.00468606242115814 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.01588988836256049, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.01588988836256049 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.18723404255319148, + "acc_stderr": 0.025501588341883614, + "acc_norm": 0.18723404255319148, + "acc_norm_stderr": 0.025501588341883614 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.029620227874790458, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.029620227874790458 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.02738140692786896, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.02738140692786896 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.022489389793654835, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.022489389793654835 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252628, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594525, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594525 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230196, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230196 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.03036049015401464, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.03036049015401464 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724148, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724148 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24691358024691357, + "acc_stderr": 0.023993501709042096, + "acc_norm": 0.24691358024691357, + "acc_norm_stderr": 0.023993501709042096 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909902, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909902 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22752293577981653, + "acc_stderr": 0.0179744635787765, + "acc_norm": 0.22752293577981653, + "acc_norm_stderr": 0.0179744635787765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.031298431857438094, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.031298431857438094 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032501, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032501 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148594, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148594 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349843007, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349843007 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925328, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925328 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483924, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.16326530612244897, + "acc_stderr": 0.023661699177098598, + "acc_norm": 0.16326530612244897, + "acc_norm_stderr": 0.023661699177098598 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2522816166883963, + "acc_stderr": 0.011092789056875229, + "acc_norm": 0.2522816166883963, + "acc_norm_stderr": 0.011092789056875229 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.44204190262154125, + "mc2_stderr": 0.015345648446767756 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3364817001180638, + "acc_stderr": 0.016245085294386556, + "acc_norm": 0.4427390791027155, + "acc_norm_stderr": 0.017077254131556217 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/ANHSY_test", + "model_sha": "eb2f1cb1cc7a4dfab1e641fb65c64293ed14006c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/ANHSY_test2/result_2023-11-11 16:14:35.json b/blueapple8259/ANHSY_test2/result_2023-11-11 16:14:35.json new file mode 100644 index 0000000000000000000000000000000000000000..3545f076310a491abbc7a11cf8976932d11753b7 --- /dev/null +++ b/blueapple8259/ANHSY_test2/result_2023-11-11 16:14:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20733788395904437, + "acc_stderr": 0.011846905782971368, + "acc_norm": 0.2440273037542662, + "acc_norm_stderr": 0.012551447627856259 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2900816570404302, + "acc_stderr": 0.004528723951878253, + "acc_norm": 0.32772356104361683, + "acc_norm_stderr": 0.004684241685200313 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245232, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245232 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.280970625798212, + "acc_stderr": 0.01607312785122125, + "acc_norm": 0.280970625798212, + "acc_norm_stderr": 0.01607312785122125 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.02880998985410299, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.02880998985410299 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18686868686868688, + "acc_stderr": 0.027772533334218967, + "acc_norm": 0.18686868686868688, + "acc_norm_stderr": 0.027772533334218967 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533946, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533946 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.021763733684173933, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.021763733684173933 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.024892469172462833, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.024892469172462833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.02795182680892433, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.02795182680892433 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108608, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.038950910157241364, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.038950910157241364 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935558, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935558 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641145, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641145 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633345, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633345 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845335, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796627, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796627 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21241830065359477, + "acc_stderr": 0.023420375478296136, + "acc_norm": 0.21241830065359477, + "acc_norm_stderr": 0.023420375478296136 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.25163398692810457, + "acc_stderr": 0.01755581809132227, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.01755581809132227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.02512373922687241, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.02512373922687241 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347018, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347018 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767867, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767867 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.025409301953225678, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.025409301953225678 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045519, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045519 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693257, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693257 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826824, + "mc2": 0.44746742387165916, + "mc2_stderr": 0.015687397901643654 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3435655253837072, + "acc_stderr": 0.016327334806429134, + "acc_norm": 0.4510035419126328, + "acc_norm_stderr": 0.017107618859549357 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/ANHSY_test2", + "model_sha": "bb2cfbebcdf945d14ec8f53215d9f9b5bbc96742", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/TinyKo-V2/result_2023-12-18 13:43:41.json b/blueapple8259/TinyKo-V2/result_2023-12-18 13:43:41.json new file mode 100644 index 0000000000000000000000000000000000000000..c1e3ac2eb9f10293768112cd153d8453d731bcee --- /dev/null +++ b/blueapple8259/TinyKo-V2/result_2023-12-18 13:43:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19880546075085323, + "acc_stderr": 0.011662850198175534, + "acc_norm": 0.2431740614334471, + "acc_norm_stderr": 0.012536554144587096 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25582553276239794, + "acc_stderr": 0.004354325017137537, + "acc_norm": 0.25423222465644296, + "acc_norm_stderr": 0.004345388614520016 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.03301405946987252, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.03301405946987252 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822585, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822585 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2541507024265645, + "acc_stderr": 0.015569254692045792, + "acc_norm": 0.2541507024265645, + "acc_norm_stderr": 0.015569254692045792 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.21481481481481482, + "acc_stderr": 0.03547854198560823, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.03547854198560823 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292326, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292326 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824665, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824665 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3991031390134529, + "acc_stderr": 0.032867453125679603, + "acc_norm": 0.3991031390134529, + "acc_norm_stderr": 0.032867453125679603 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.02655220782821529, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.02655220782821529 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.026841514322958955, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.026841514322958955 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026945, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026945 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24516129032258063, + "acc_stderr": 0.024472243840895518, + "acc_norm": 0.24516129032258063, + "acc_norm_stderr": 0.024472243840895518 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.02987257770889117, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.02987257770889117 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708094, 
+ "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708094 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.17880794701986755, + "acc_stderr": 0.031287448506007225, + "acc_norm": 0.17880794701986755, + "acc_norm_stderr": 0.031287448506007225 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834839, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834839 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.024477222856135118, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.024477222856135118 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19170984455958548, + "acc_stderr": 0.02840895362624528, + "acc_norm": 0.19170984455958548, + "acc_norm_stderr": 0.02840895362624528 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02380518652488816, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02380518652488816 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + 
"acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.031546980450822305, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.031546980450822305 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612378984, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612378984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.14351851851851852, + "acc_stderr": 0.02391077925264438, + "acc_norm": 0.14351851851851852, + "acc_norm_stderr": 0.02391077925264438 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331161, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331161 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.0284588209914603, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.0284588209914603 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02977177522814563, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02977177522814563 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.4992612656093796, + "mc2_stderr": 0.016216678646274893 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2255017709563164, + "acc_stderr": 0.014368122149532182, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.017077254131556224 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 
1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/TinyKo-V2", + "model_sha": "ad90efb3381d5672fa95cc202734e341710e83e8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/TinyKo-V3/result_2023-12-23 12:03:51.json b/blueapple8259/TinyKo-V3/result_2023-12-23 12:03:51.json new file mode 100644 index 0000000000000000000000000000000000000000..3b61ce0eee049b57caa981582baa8140a8e467a2 --- /dev/null +++ b/blueapple8259/TinyKo-V3/result_2023-12-23 12:03:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1962457337883959, + "acc_stderr": 0.011606019881416279, + "acc_norm": 0.2525597269624573, + "acc_norm_stderr": 0.012696728980207708 + }, + "harness|ko_hellaswag|10": { + "acc": 0.26180043815972914, + "acc_stderr": 0.004387161203087963, + "acc_norm": 0.26558454491137223, + "acc_norm_stderr": 0.004407413723383402 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + 
"acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26436781609195403, + "acc_stderr": 0.01576998484069052, + "acc_norm": 0.26436781609195403, + "acc_norm_stderr": 0.01576998484069052 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2297872340425532, + "acc_stderr": 0.027501752944412428, + "acc_norm": 0.2297872340425532, + "acc_norm_stderr": 0.027501752944412428 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.035294868015111155, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.035294868015111155 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455005, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455005 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677698, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677698 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02962022787479048, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02962022787479048 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.19327731092436976, + "acc_stderr": 0.025649470265889183, + "acc_norm": 0.19327731092436976, + "acc_norm_stderr": 0.025649470265889183 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.19487179487179487, + "acc_stderr": 0.02008316759518139, + "acc_norm": 0.19487179487179487, + "acc_norm_stderr": 0.02008316759518139 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946336, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946336 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22660098522167488, + "acc_stderr": 0.029454863835292982, + "acc_norm": 0.22660098522167488, + "acc_norm_stderr": 
0.029454863835292982 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2161290322580645, + "acc_stderr": 0.02341529343356852, + "acc_norm": 0.2161290322580645, + "acc_norm_stderr": 0.02341529343356852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106748, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106748 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.19090909090909092, + "acc_stderr": 0.03764425585984926, + "acc_norm": 0.19090909090909092, + "acc_norm_stderr": 0.03764425585984926 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844075, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.03076944496729601, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729601 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.030299574664788147, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.030299574664788147 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.033519538795212696, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.033519538795212696 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023132376234543346, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023132376234543346 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796638, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 
0.017818849564796638 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.024051029739912258, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.024051029739912258 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032501, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032501 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.01690661592728815, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.01690661592728815 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000536, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000536 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.022571771025494757, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.022571771025494757 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.02671143055553839, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.02671143055553839 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598046, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598046 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113902, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113902 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766375, + "mc2": 0.5182014726410905, + "mc2_stderr": 0.01613083897465494 + 
}, + "harness|ko_commongen_v2|2": { + "acc": 0.2255017709563164, + "acc_stderr": 0.01436812214953218, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.017115418225226862 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/TinyKo-V3", + "model_sha": "e1105108a78beec3508e6a6ee0591aac17e97df9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/TinyKo/result_2023-12-10 12:47:01.json b/blueapple8259/TinyKo/result_2023-12-10 12:47:01.json new file mode 100644 index 0000000000000000000000000000000000000000..c60c09224f26347991cee493998b2fbb61702226 --- /dev/null +++ b/blueapple8259/TinyKo/result_2023-12-10 12:47:01.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.20392491467576793, + "acc_stderr": 0.011774262478702254, + "acc_norm": 0.2721843003412969, + "acc_norm_stderr": 0.013006600406423704 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25054769966142204, + "acc_stderr": 0.004324428538963687, + "acc_norm": 0.25144393547102173, + "acc_norm_stderr": 0.004329565016527316 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20434227330779056, + "acc_stderr": 0.014419123980931904, + "acc_norm": 0.20434227330779056, + "acc_norm_stderr": 0.014419123980931904 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.030709824050565264, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.030709824050565264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.10762331838565023, + "acc_stderr": 0.020799400082880004, + "acc_norm": 0.10762331838565023, + "acc_norm_stderr": 0.020799400082880004 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.024121125416941176, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.024121125416941176 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 
0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042767, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042767 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493864, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493864 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577656, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577656 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036622, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036622 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 
0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3486238532110092, + "acc_stderr": 0.020431254090714328, + "acc_norm": 0.3486238532110092, + "acc_norm_stderr": 0.020431254090714328 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279053, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279053 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.20751633986928106, + "acc_stderr": 0.016405924270103234, + "acc_norm": 0.20751633986928106, + "acc_norm_stderr": 0.016405924270103234 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.02564555362226673, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.02564555362226673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25097783572359844, + "acc_stderr": 0.011073730299187234, + "acc_norm": 0.25097783572359844, + "acc_norm_stderr": 0.011073730299187234 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 
0.030964517926923403, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03588624800091707, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091707 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520698, + "mc2": 0.5110615894861509, + "mc2_stderr": 0.015927383181781492 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.11452184179456906, + "acc_stderr": 0.010948330698808921, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.016819438642971408 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/TinyKo", + "model_sha": "2cc49adce5ca81f1cb4b406d68c3e80d0270e4d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/TinyKoWiki-v1/result_2023-12-25 02:41:39.json b/blueapple8259/TinyKoWiki-v1/result_2023-12-25 02:41:39.json new file mode 100644 index 0000000000000000000000000000000000000000..a9688fae89da5234e94a78654b1a9a38ea4073e8 --- /dev/null +++ b/blueapple8259/TinyKoWiki-v1/result_2023-12-25 02:41:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1945392491467577, + "acc_stderr": 0.011567709174648728, + "acc_norm": 0.23208191126279865, + "acc_norm_stderr": 0.012336718284948853 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2528380800637323, + "acc_stderr": 0.0043375063448999164, + "acc_norm": 0.25144393547102173, + "acc_norm_stderr": 0.004329565016527321 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777573, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066654, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066654 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663925, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663925 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188936, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188936 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.029442495585857476, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.029442495585857476 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.03375672449560554, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.03375672449560554 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.02665353159671548, + 
"acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.02665353159671548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722127995, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722127995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23225806451612904, + "acc_stderr": 0.02402225613030824, + "acc_norm": 0.23225806451612904, + "acc_norm_stderr": 0.02402225613030824 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.028120966503914407, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.028120966503914407 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.025907897122408173, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.025907897122408173 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.038313051408846006, + "acc_norm": 0.2, + "acc_norm_stderr": 0.038313051408846006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230172, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230172 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.033742355504256936, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.033742355504256936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014666, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014666 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.024257901705323374, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.024257901705323374 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 
0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.025407197798890165, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.025407197798890165 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.029778663037752947, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.029778663037752947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21284403669724772, + "acc_stderr": 0.01754937638931369, + "acc_norm": 0.21284403669724772, + "acc_norm_stderr": 0.01754937638931369 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.02463004897982478, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.02463004897982478 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312337, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312337 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2630718954248366, + "acc_stderr": 0.01781267654232065, + "acc_norm": 0.2630718954248366, + "acc_norm_stderr": 0.01781267654232065 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872405, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.02699145450203673, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.02699145450203673 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.024231013370541087, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.024231013370541087 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866767, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 
0.026176967197866767 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145628, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145628 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.0148697550158711, + "mc2": 0.4810748840249365, + "mc2_stderr": 0.01638238136567759 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.18890200708382526, + "acc_stderr": 0.013457666696421402, + "acc_norm": 0.4781582054309327, + "acc_norm_stderr": 0.017173944474294378 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/TinyKoWiki-v1", + "model_sha": "42bb6fcd1ed5b902e4ca07e25358c8a13c3e5bb3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/test_model1/result_2023-12-05 03:42:51.json b/blueapple8259/test_model1/result_2023-12-05 03:42:51.json new file mode 100644 index 0000000000000000000000000000000000000000..1e2132945f14d66ae44c0748e07854fe3006741b --- /dev/null +++ b/blueapple8259/test_model1/result_2023-12-05 03:42:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2030716723549488, + "acc_stderr": 0.011755899303705582, + "acc_norm": 0.25426621160409557, + "acc_norm_stderr": 0.01272499994515774 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2535351523600876, + "acc_stderr": 0.00434145484189233, + "acc_norm": 0.2502489543915555, + "acc_norm_stderr": 0.004322710911026373 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209195, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209195 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.0376017800602662, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.0376017800602662 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2784163473818646, + "acc_stderr": 0.01602829518899246, + "acc_norm": 0.2784163473818646, + "acc_norm_stderr": 0.01602829518899246 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.0261488180184245, + "acc_norm": 0.2, + "acc_norm_stderr": 0.0261488180184245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789413, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789413 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.02608270069539966, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.02608270069539966 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.19083969465648856, + "acc_stderr": 0.03446513350752599, + "acc_norm": 0.19083969465648856, + "acc_norm_stderr": 0.03446513350752599 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.30344827586206896, + "acc_stderr": 0.03831226048850333, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.03831226048850333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714506, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714506 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380565, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380565 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2230769230769231, + "acc_stderr": 0.02110773012724399, + "acc_norm": 0.2230769230769231, + "acc_norm_stderr": 0.02110773012724399 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.032550867699701024, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.032550867699701024 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24516129032258063, + "acc_stderr": 0.02447224384089553, + "acc_norm": 0.24516129032258063, + "acc_norm_stderr": 0.02447224384089553 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.027236013946196694, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.027236013946196694 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.026616482980501722, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.026616482980501722 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.03831305140884601, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03831305140884601 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008937, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008937 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213322, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213322 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.03063114553919882, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.03063114553919882 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 
0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577615, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577615 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.024569223600460852, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.024569223600460852 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.01827257581023187, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.01827257581023187 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879341016, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.024170840879341016 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.01740181671142765, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.01740181671142765 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.14814814814814814, + "acc_stderr": 0.024227629273728356, + "acc_norm": 0.14814814814814814, + "acc_norm_stderr": 0.024227629273728356 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2324022346368715, + "acc_stderr": 0.0141259687546734, + "acc_norm": 0.2324022346368715, + "acc_norm_stderr": 0.0141259687546734 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + 
"acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20955882352941177, + "acc_stderr": 0.02472311040767707, + "acc_norm": 0.20955882352941177, + "acc_norm_stderr": 0.02472311040767707 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.026711430555538433, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.026711430555538433 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22784810126582278, + "acc_stderr": 0.027303484599069443, + "acc_norm": 0.22784810126582278, + "acc_norm_stderr": 0.027303484599069443 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23859191655801826, + "acc_stderr": 0.010885929742002205, + "acc_norm": 0.23859191655801826, + "acc_norm_stderr": 0.010885929742002205 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501964, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501964 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.49748125321542586, + "mc2_stderr": 0.016335585216269707 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.19834710743801653, + "acc_stderr": 0.013709478542303373, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.01705263355985607 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/test_model1", + "model_sha": "4e1233d9d69f819778b8969ea74dc5a01db876b3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/boracious/llama-2-7b-test/result_2023-12-24 14:02:24.json b/boracious/llama-2-7b-test/result_2023-12-24 14:02:24.json new file mode 100644 index 0000000000000000000000000000000000000000..62afce3d6d8c9afab117f1cecc37053228c01af3 --- /dev/null +++ b/boracious/llama-2-7b-test/result_2023-12-24 14:02:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.18771331058020477, + "acc_stderr": 0.011411001314155133, + "acc_norm": 0.23976109215017063, + "acc_norm_stderr": 0.012476304127453956 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2492531368253336, + "acc_stderr": 0.004316965678675089, + "acc_norm": 0.24447321250746862, + "acc_norm_stderr": 0.004288960926085629 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.031885780176863984, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.031885780176863984 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2886334610472541, + "acc_stderr": 0.016203792703197797, + "acc_norm": 0.2886334610472541, + "acc_norm_stderr": 0.016203792703197797 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614866, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614866 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 
0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.029620227874790454, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.029620227874790454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2230769230769231, + "acc_stderr": 0.02110773012724399, + "acc_norm": 0.2230769230769231, + "acc_norm_stderr": 0.02110773012724399 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02860595370200424, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.17880794701986755, + "acc_stderr": 0.03128744850600724, + "acc_norm": 0.17880794701986755, + "acc_norm_stderr": 0.03128744850600724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + 
"acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803627, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803627 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + 
"acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.14, + "acc_stderr": 0.0348735088019777, + "acc_norm": 0.14, + "acc_norm_stderr": 0.0348735088019777 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.48362811394951233, + "mc2_stderr": 0.017114971034498782 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.0743801652892562, + "acc_stderr": 0.009021104510906089, + "acc_norm": 0.30342384887839435, + "acc_norm_stderr": 0.015806072717909563 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "boracious/llama-2-7b-test", + "model_sha": "e9591396303478caf649713e73939f348109529b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-3data-merged/result_2023-10-09 14:50:39.json b/caisarl76/Mistral-7B-3data-merged/result_2023-10-09 14:50:39.json new file mode 100644 index 0000000000000000000000000000000000000000..3d622996c34b82e2a6e33c02e1d0b8b2093f0e7b --- /dev/null +++ b/caisarl76/Mistral-7B-3data-merged/result_2023-10-09 14:50:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.013640943091946528, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407166 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37094204341764586, + "acc_stderr": 0.004820697457420419, + "acc_norm": 0.47480581557458673, + "acc_norm_stderr": 0.0049834428886777705 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4495530012771392, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.4495530012771392, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + 
"acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.028043399858210628, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.028043399858210628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.03191100192835794, + "acc_norm": 0.3452914798206278, + "acc_norm_stderr": 0.03191100192835794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.03941707632064889, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.03941707632064889 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.0242831405294673, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.0242831405294673 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165897, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165897 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199596, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959316, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959316 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + 
"acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.035319879302087305, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.035319879302087305 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267437, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267437 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124764, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124764 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42385321100917434, + "acc_stderr": 0.021187263209087516, + "acc_norm": 0.42385321100917434, + "acc_norm_stderr": 0.021187263209087516 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523811, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523811 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.039531733777491924, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.039531733777491924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.01920660684882537, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.01920660684882537 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755805, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755805 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527829, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527829 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411952, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411952 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.029923100563683906, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.029923100563683906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.01211793999870587, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.01211793999870587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.0345423658538061, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.0345423658538061 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.015744027248256055, + "mc2": 0.45994906823090903, + "mc2_stderr": 0.01581120469816343 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3789846517119244, + "acc_stderr": 0.01667926068422929, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.0170725258755631 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-3data-merged", + "model_sha": "7df44d1c021898b608f741519016e4fd1373e636", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-Openorca-cot-2157/result_2023-10-23 00:34:03.json b/caisarl76/Mistral-7B-Openorca-cot-2157/result_2023-10-23 00:34:03.json new file mode 100644 index 0000000000000000000000000000000000000000..7d20aca2d4aa615f8033e2ef5a46ab546b999e2f --- /dev/null +++ b/caisarl76/Mistral-7B-Openorca-cot-2157/result_2023-10-23 00:34:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2226962457337884, + "acc_stderr": 0.012158314774829919, + "acc_norm": 0.2960750853242321, + "acc_norm_stderr": 0.013340916085246254 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2795259908384784, + "acc_stderr": 0.004478491697891243, + "acc_norm": 0.30870344552877915, + "acc_norm_stderr": 0.004610143575553467 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.03753638955761691, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.03753638955761691 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.04721188506097173, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.04721188506097173 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36909323116219667, + "acc_stderr": 0.01725628310912461, + "acc_norm": 0.36909323116219667, + "acc_norm_stderr": 0.01725628310912461 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 
0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.031068985963122155, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.031068985963122155 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.34726688102893893, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.34726688102893893, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467766, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467766 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.03304205087813652, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.03304205087813652 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378948, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378948 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.02947248583313609, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.02947248583313609 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3230769230769231, + "acc_stderr": 0.023710888501970565, + "acc_norm": 0.3230769230769231, + "acc_norm_stderr": 0.023710888501970565 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22167487684729065, + "acc_stderr": 0.029225575892489614, + "acc_norm": 0.22167487684729065, + "acc_norm_stderr": 0.029225575892489614 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34838709677419355, + "acc_stderr": 0.02710482632810094, + "acc_norm": 0.34838709677419355, + "acc_norm_stderr": 0.02710482632810094 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.03274531938842351, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.03274531938842351 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.028637235639800928, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.028637235639800928 
+ }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275794, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275794 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696525, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43781094527363185, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.43781094527363185, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101813, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101813 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33815028901734107, + "acc_stderr": 0.02546977014940017, + "acc_norm": 0.33815028901734107, + "acc_norm_stderr": 0.02546977014940017 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.02610567386140981, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.02610567386140981 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35233160621761656, + "acc_stderr": 0.03447478286414358, + "acc_norm": 0.35233160621761656, + "acc_norm_stderr": 0.03447478286414358 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3798165137614679, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.3798165137614679, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.026716118380156837, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.026716118380156837 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.45454545454545453, + 
"acc_stderr": 0.045454545454545456, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.045454545454545456 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137283, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137283 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.01887568293806944, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.01887568293806944 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093933, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093933 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966351, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966351 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.026537045312145312, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.026537045312145312 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2542372881355932, + "acc_stderr": 0.011121129007840676, + "acc_norm": 0.2542372881355932, + "acc_norm_stderr": 0.011121129007840676 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.032566854844603886, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.032566854844603886 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.40606060606060607, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.40606060606060607, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476189, + "mc2": 0.4874892521316813, + "mc2_stderr": 0.017011135502882097 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.16646989374262103, + "acc_stderr": 0.01280687925641312, + "acc_norm": 0.2833530106257379, + "acc_norm_stderr": 0.015492852084597233 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-Openorca-cot-2157", + "model_sha": "eaf722c66f6bbb64f7f43d08bc9de3b36be29d2b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-eng-kor-cot-combined/result_2023-10-23 00:34:59.json b/caisarl76/Mistral-7B-eng-kor-cot-combined/result_2023-10-23 00:34:59.json new file mode 100644 index 0000000000000000000000000000000000000000..2ee2428f3c460e53e68e02640fe2f9154daf6e2b --- /dev/null +++ b/caisarl76/Mistral-7B-eng-kor-cot-combined/result_2023-10-23 00:34:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2901023890784983, + "acc_stderr": 0.01326157367752077, + "acc_norm": 0.34812286689419797, + "acc_norm_stderr": 0.013921008595179335 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35012945628360886, + "acc_stderr": 0.004760354191370866, + "acc_norm": 0.4374626568412667, + "acc_norm_stderr": 0.0049505983006675565 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 
0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.0484674825397724, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.0484674825397724 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.017612204084663775, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.017612204084663775 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.039446241625011175, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.039446241625011175 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309993, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309993 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.03006676158297793, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.03006676158297793 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052452, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052452 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": 
{ + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5427350427350427, + "acc_stderr": 0.03263622596380688, + "acc_norm": 0.5427350427350427, + "acc_norm_stderr": 0.03263622596380688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.028254200344438662, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.028254200344438662 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524586, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524586 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4427860696517413, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.4427860696517413, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269995, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269995 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.38439306358381503, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.38439306358381503, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.037311335196738925, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.037311335196738925 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.026571483480719967, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.026571483480719967 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38860103626943004, + "acc_stderr": 0.035177397963731316, + "acc_norm": 0.38860103626943004, + "acc_norm_stderr": 0.035177397963731316 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3908256880733945, + "acc_stderr": 0.020920058346111065, + "acc_norm": 0.3908256880733945, + "acc_norm_stderr": 0.020920058346111065 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 
0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159624, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.027582811415159624 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.48760330578512395, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.01871806705262323, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.01871806705262323 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510927, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510927 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21564245810055865, + "acc_stderr": 0.013754835975482336, + "acc_norm": 0.21564245810055865, + "acc_norm_stderr": 0.013754835975482336 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.029923100563683906, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.029923100563683906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2907431551499348, + "acc_stderr": 0.011598062372851974, + "acc_norm": 0.2907431551499348, + "acc_norm_stderr": 0.011598062372851974 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.03402272044340705, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.03402272044340705 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.0381549430868893 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4699106773315303, + "mc2_stderr": 0.01582978440702906 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22904368358913813, + "acc_stderr": 
0.014447372277253822, + "acc_norm": 0.24557260920897284, + "acc_norm_stderr": 0.014798357154972826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-eng-kor-cot-combined", + "model_sha": "d7e959c88fdc316602494d1ffd2bf52d33371f89", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-orca-1k-platy-1k/result_2023-10-22 12:42:37.json b/caisarl76/Mistral-7B-orca-1k-platy-1k/result_2023-10-22 12:42:37.json new file mode 100644 index 0000000000000000000000000000000000000000..c9c428dd4b099bccd150980f6214fe391755a829 --- /dev/null +++ b/caisarl76/Mistral-7B-orca-1k-platy-1k/result_2023-10-22 12:42:37.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.31313993174061433, + "acc_stderr": 0.013552671543623497, + "acc_norm": 0.3660409556313993, + "acc_norm_stderr": 0.014077223108470139 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37183827922724555, + "acc_stderr": 0.004823078145064963, + "acc_norm": 0.45947022505477, + "acc_norm_stderr": 0.00497336133916965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.03805797505590459, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.03805797505590459 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4367816091954023, + "acc_stderr": 0.017736470837800677, + "acc_norm": 0.4367816091954023, + "acc_norm_stderr": 0.017736470837800677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894245, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894245 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.032361983509282766, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.032361983509282766 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307808, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307808 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.024784316942156374, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.024784316942156374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + 
"acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3584905660377358, + "acc_stderr": 0.029514703583981755, + "acc_norm": 0.3584905660377358, + "acc_norm_stderr": 0.029514703583981755 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066485, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.03533133389323657, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.03533133389323657 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.02386520683697259, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.02386520683697259 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.02704453813840262, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.02704453813840262 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 
0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43486238532110094, + "acc_stderr": 0.02125463146560928, + "acc_norm": 0.43486238532110094, + "acc_norm_stderr": 0.02125463146560928 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03782728980865469, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03782728980865469 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355442, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30614525139664805, + "acc_stderr": 0.01541449448790321, + "acc_norm": 0.30614525139664805, + "acc_norm_stderr": 0.01541449448790321 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.012117939998705876, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.012117939998705876 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833344, + "acc_norm": 
0.3872549019607843, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236618, + "mc2": 0.4769559005507783, + "mc2_stderr": 0.015879206203595765 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.34946871310507677, + "acc_stderr": 0.01639279708576985, + "acc_norm": 0.39787485242030696, + "acc_norm_stderr": 0.01682795905473338 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-orca-1k-platy-1k", + "model_sha": "528d7bcaa2489daeea58946d17b341b55946f21b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + 
"num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-orca-platy-1k-ko-f-1871/result_2023-10-22 22:35:04.json b/caisarl76/Mistral-7B-orca-platy-1k-ko-f-1871/result_2023-10-22 22:35:04.json new file mode 100644 index 0000000000000000000000000000000000000000..815017928d2ffa29e4ac6c279492e289eb393149 --- /dev/null +++ b/caisarl76/Mistral-7B-orca-platy-1k-ko-f-1871/result_2023-10-22 22:35:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3267918088737201, + "acc_stderr": 0.013706665975587333, + "acc_norm": 0.3779863481228669, + "acc_norm_stderr": 0.014169664520303103 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3707428799044015, + "acc_stderr": 0.004820166002253069, + "acc_norm": 0.4790878311093408, + "acc_norm_stderr": 0.004985415250690911 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041692, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041692 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38569604086845466, + "acc_stderr": 0.017406476619212904, + "acc_norm": 0.38569604086845466, + "acc_norm_stderr": 0.017406476619212904 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02850485647051418, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02850485647051418 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288087, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288087 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3536977491961415, + "acc_stderr": 0.027155208103200865, + "acc_norm": 0.3536977491961415, + "acc_norm_stderr": 0.027155208103200865 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2556053811659193, + "acc_stderr": 0.029275891003969923, + "acc_norm": 0.2556053811659193, + "acc_norm_stderr": 0.029275891003969923 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462202, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.04010358942462202 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.03394853965156403, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.03394853965156403 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 
0.02959732973097809, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.02959732973097809 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.02281581309889661, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.02281581309889661 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.027273890594300642, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.027273890594300642 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.032485775115784, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.032485775115784 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2830188679245283, + "acc_stderr": 0.027724236492700907, + "acc_norm": 0.2830188679245283, + "acc_norm_stderr": 0.027724236492700907 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946458, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946458 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4228855721393035, + "acc_stderr": 0.03493231777421281, + "acc_norm": 0.4228855721393035, + "acc_norm_stderr": 0.03493231777421281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101803, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101803 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566016, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.025722802200895813, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.025722802200895813 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924034, + "acc_norm": 0.3128834355828221, + 
"acc_norm_stderr": 0.036429145782924034 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.02704453813840262, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.02704453813840262 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29015544041450775, + "acc_stderr": 0.03275264467791515, + "acc_norm": 0.29015544041450775, + "acc_norm_stderr": 0.03275264467791515 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3357798165137615, + "acc_stderr": 0.02024808139675293, + "acc_norm": 0.3357798165137615, + "acc_norm_stderr": 0.02024808139675293 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.026643278474508758, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.026643278474508758 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.045454545454545484, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.045454545454545484 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.01846315413263281, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.01846315413263281 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.01450897945355399, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.01450897945355399 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21323529411764705, + "acc_stderr": 0.024880971512294268, + "acc_norm": 0.21323529411764705, + "acc_norm_stderr": 0.024880971512294268 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + "acc_stderr": 0.03029950656215418, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 
0.03029950656215418 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.41350210970464135, + "acc_stderr": 0.03205649904851859, + "acc_norm": 0.41350210970464135, + "acc_norm_stderr": 0.03205649904851859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.288135593220339, + "acc_stderr": 0.011567140661324565, + "acc_norm": 0.288135593220339, + "acc_norm_stderr": 0.011567140661324565 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03681050869161549, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03681050869161549 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.45737169951487844, + "mc2_stderr": 0.015829256462411827 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3955135773317591, + "acc_stderr": 0.016810815902206046, + "acc_norm": 0.4498229043683589, + "acc_norm_stderr": 0.01710357334382571 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-orca-platy-1k-ko-f-1871", + "model_sha": "648931fc59553f86c011a4e312d6fc0ee93d4b37", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-orca-platy-2k-ep4/result_2023-10-22 15:12:08.json b/caisarl76/Mistral-7B-orca-platy-2k-ep4/result_2023-10-22 15:12:08.json new file mode 100644 index 0000000000000000000000000000000000000000..98d3ff59a477dbb70380e62df9510cb466fdb91d --- /dev/null +++ b/caisarl76/Mistral-7B-orca-platy-2k-ep4/result_2023-10-22 15:12:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.295221843003413, + "acc_stderr": 0.013329750293382316, + "acc_norm": 0.3430034129692833, + "acc_norm_stderr": 0.013872423223718167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37054371639115713, + "acc_stderr": 0.0048196336688325495, + "acc_norm": 0.46345349531965746, + "acc_norm_stderr": 0.00497643438746997 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.037792759455032, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.037792759455032 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39719029374201786, + "acc_stderr": 0.01749790503715938, + "acc_norm": 0.39719029374201786, + "acc_norm_stderr": 0.01749790503715938 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628813, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628813 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.036643147772880864, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.036643147772880864 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.47474747474747475, + 
"acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138623, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138623 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.027709359675032488, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.027709359675032488 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5299145299145299, + "acc_stderr": 0.03269741106812443, + "acc_norm": 0.5299145299145299, + "acc_norm_stderr": 0.03269741106812443 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670238, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670238 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371218, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371218 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4626865671641791, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.4626865671641791, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730578, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730578 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + 
"acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.025816756791584215, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.025816756791584215 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269952, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269952 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144809, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144809 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41100917431192663, + "acc_stderr": 0.021095050687277638, + "acc_norm": 0.41100917431192663, + "acc_norm_stderr": 0.021095050687277638 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4297520661157025, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849726, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29248366013071897, + "acc_stderr": 0.01840341571010979, + "acc_norm": 0.29248366013071897, + "acc_norm_stderr": 0.01840341571010979 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2905027932960894, + "acc_stderr": 0.015183844307206157, + "acc_norm": 0.2905027932960894, + "acc_norm_stderr": 0.015183844307206157 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 
0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.02981263070156974, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.02981263070156974 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.03093285879278984, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.03093285879278984 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4810126582278481, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.4810126582278481, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2966101694915254, + "acc_stderr": 0.011665946586082844, + "acc_norm": 0.2966101694915254, + "acc_norm_stderr": 0.011665946586082844 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.03393388584958403, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.03393388584958403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502346, + "mc2": 0.449359001521154, + "mc2_stderr": 0.016084396495163696 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32585596221959856, + "acc_stderr": 0.016114023894800336, + "acc_norm": 0.3565525383707202, + "acc_norm_stderr": 0.01646770698152745 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-orca-platy-2k-ep4", + "model_sha": "fd2682689d7efd4dd350d71f64a7a8ff09842fd7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-v0.1-orca_platy-1k-ep4/result_2023-10-22 15:19:27.json b/caisarl76/Mistral-7B-v0.1-orca_platy-1k-ep4/result_2023-10-22 15:19:27.json new file mode 100644 index 0000000000000000000000000000000000000000..37a5c715b756d1f7ac23f6e90a2aa1674a39c07a --- /dev/null +++ b/caisarl76/Mistral-7B-v0.1-orca_platy-1k-ep4/result_2023-10-22 15:19:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.318259385665529, + "acc_stderr": 0.013611993916971451, + "acc_norm": 0.3583617747440273, + "acc_norm_stderr": 0.01401288333485986 + }, + "harness|ko_hellaswag|10": { + "acc": 0.368352917745469, + "acc_stderr": 0.00481371995282996, + "acc_norm": 0.46265684126667994, + "acc_norm_stderr": 0.0049758453350866195 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44061302681992337, + "acc_stderr": 0.017753396973908486, + "acc_norm": 0.44061302681992337, + "acc_norm_stderr": 0.017753396973908486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610334, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610334 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.02812534098397271, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.02812534098397271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.03114679648297246, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.03114679648297246 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38974358974358975, + "acc_stderr": 0.024726967886647078, + "acc_norm": 0.38974358974358975, + "acc_norm_stderr": 0.024726967886647078 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.35161290322580646, + "acc_stderr": 0.027162537826948458, + "acc_norm": 0.35161290322580646, + "acc_norm_stderr": 0.027162537826948458 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413866, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413866 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199596, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4626865671641791, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.4626865671641791, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.02413015829976262, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.02413015829976262 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.025906632631016127, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.025906632631016127 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3734567901234568, + "acc_stderr": 0.02691500301138015, + "acc_norm": 0.3734567901234568, + "acc_norm_stderr": 0.02691500301138015 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.37305699481865284, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45688073394495415, + "acc_stderr": 0.021357458785226206, + "acc_norm": 0.45688073394495415, + "acc_norm_stderr": 0.021357458785226206 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.027245613047215362, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.027245613047215362 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.019117213911495175, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.019117213911495175 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503803, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503803 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { 
+ "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767867, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767867 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.03000856284500348, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.03000856284500348 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 0.030555316755573637, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.030555316755573637 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45569620253164556, + "acc_stderr": 0.03241920684693334, + "acc_norm": 0.45569620253164556, + "acc_norm_stderr": 0.03241920684693334 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2835723598435463, + "acc_stderr": 0.011511900775968302, + "acc_norm": 0.2835723598435463, + "acc_norm_stderr": 0.011511900775968302 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.459471439183592, + "mc2_stderr": 0.016149154578981872 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.345926800472255, + "acc_stderr": 0.016353853414347568, + "acc_norm": 0.3624557260920897, + "acc_norm_stderr": 0.016527131240453713 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 
1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-v0.1-orca_platy-1k-ep4", + "model_sha": "e3e91aad9d307bf43b516f95440a35a1db3e1c68", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-en-llama2-13b-finetune-ex/result_2023-11-07 01:30:27.json b/cepiloth/ko-en-llama2-13b-finetune-ex/result_2023-11-07 01:30:27.json new file mode 100644 index 0000000000000000000000000000000000000000..7d73b0b554536b2c20f82b3a58ad8fac06147c22 --- /dev/null +++ b/cepiloth/ko-en-llama2-13b-finetune-ex/result_2023-11-07 01:30:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470137, + "acc_norm": 0.4035836177474403, + "acc_norm_stderr": 0.01433715891426844 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3948416650069707, + "acc_stderr": 0.004878176541703574, + "acc_norm": 0.5118502290380402, + "acc_norm_stderr": 0.004988379805261165 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.017758800534214417, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.017758800534214417 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785139, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785139 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02850485647051419, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02850485647051419 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.02791705074848462, + "acc_norm": 0.40836012861736337, + "acc_norm_stderr": 0.02791705074848462 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.040131241954243856, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.040131241954243856 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34102564102564104, + "acc_stderr": 0.024035489676335065, + "acc_norm": 0.34102564102564104, + "acc_norm_stderr": 0.024035489676335065 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3419354838709677, + "acc_stderr": 0.02698528957655274, + "acc_norm": 0.3419354838709677, + "acc_norm_stderr": 0.02698528957655274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.032745319388423504, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.032745319388423504 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.043091187099464585, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.043091187099464585 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473075, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.24503311258278146, + "acc_stderr": 0.035118075718047245, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.035118075718047245 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3681592039800995, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.3681592039800995, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.0336876293225943, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.0336876293225943 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655805, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655805 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686934, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686934 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0258167567915842, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0258167567915842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935574, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935574 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3734567901234568, + "acc_stderr": 0.026915003011380147, + "acc_norm": 0.3734567901234568, + "acc_norm_stderr": 0.026915003011380147 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39896373056994816, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.39896373056994816, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3926605504587156, + "acc_stderr": 0.020937505161201093, + "acc_norm": 0.3926605504587156, + "acc_norm_stderr": 0.020937505161201093 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.027420477662629245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.027420477662629245 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.48760330578512395, + "acc_stderr": 0.045629515481807666, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.045629515481807666 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.017401816711427653, + 
"acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.017401816711427653 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.0316746870682898, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.0316746870682898 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.02736586113151381, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.02736586113151381 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3881856540084388, + "acc_stderr": 0.031722950043323296, + "acc_norm": 0.3881856540084388, + "acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28552803129074317, + "acc_stderr": 0.011535751586665668, + "acc_norm": 0.28552803129074317, + "acc_norm_stderr": 0.011535751586665668 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399811, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399811 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882466, + "mc2": 0.48334405699140953, + "mc2_stderr": 0.015932530840786423 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33884297520661155, + "acc_stderr": 0.01627295299701912, + "acc_norm": 0.3789846517119244, + "acc_norm_stderr": 0.016679260684229282 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, 
+ "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-en-llama2-13b-finetune-ex", + "model_sha": "ee6a38bb61742af106567d743b3d87458a303f60", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-en-llama2-13b-finetune/result_2023-11-03 07:39:57.json b/cepiloth/ko-en-llama2-13b-finetune/result_2023-11-03 07:39:57.json new file mode 100644 index 0000000000000000000000000000000000000000..c62c614f2db27dc6b23abf0be1b742a3ed47277d --- /dev/null +++ b/cepiloth/ko-en-llama2-13b-finetune/result_2023-11-03 07:39:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407163, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650649 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40310695080661224, + "acc_stderr": 0.0048951941438926784, + "acc_norm": 0.536247759410476, + "acc_norm_stderr": 0.004976651989757641 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5006385696040868, + "acc_stderr": 0.01787994891443169, + "acc_norm": 0.5006385696040868, + "acc_norm_stderr": 0.01787994891443169 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 
0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.02812534098397271, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.02812534098397271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165894, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165894 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 
0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123936, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123936 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.026636539741116076, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.026636539741116076 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.02762873715566878, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.02762873715566878 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42385321100917434, + "acc_stderr": 0.021187263209087533, + "acc_norm": 0.42385321100917434, + "acc_norm_stderr": 0.021187263209087533 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.02811092849280907, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.02811092849280907 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.01895088677080631, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.01895088677080631 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509317, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.03085199299325701, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.03085199299325701 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23161764705882354, + "acc_stderr": 0.025626533803777562, + "acc_norm": 0.23161764705882354, + "acc_norm_stderr": 0.025626533803777562 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.46835443037974683, + "acc_stderr": 0.03248197400511075, + "acc_norm": 0.46835443037974683, + "acc_norm_stderr": 0.03248197400511075 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271824, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271824 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.45610675413247587, + "mc2_stderr": 0.01508637089874796 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41440377804014167, + "acc_stderr": 0.016936583383943625, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-en-llama2-13b-finetune", + "model_sha": "966347fa24706fb7265c1967e3212504ad0f32da", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-llama2-13b-finetune-ex/result_2023-11-02 09:31:28.json b/cepiloth/ko-llama2-13b-finetune-ex/result_2023-11-02 09:31:28.json new file mode 100644 index 0000000000000000000000000000000000000000..12dbc0f8fce70a85c20744eeb64786266662f87f --- /dev/null +++ b/cepiloth/ko-llama2-13b-finetune-ex/result_2023-11-02 09:31:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.36945392491467577, + "acc_norm_stderr": 0.014104578366491904 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35620394343756223, + "acc_stderr": 0.004778978031389642, + "acc_norm": 0.45488946425014937, + "acc_norm_stderr": 0.004969431900874307 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.03771283107626544, + "acc_norm": 
0.4093567251461988, + "acc_norm_stderr": 0.03771283107626544 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44699872286079184, + "acc_stderr": 0.017779225233394213, + "acc_norm": 0.44699872286079184, + "acc_norm_stderr": 0.017779225233394213 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288088, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288088 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36012861736334406, + "acc_stderr": 0.027264297599804012, + "acc_norm": 0.36012861736334406, + "acc_norm_stderr": 0.027264297599804012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.040131241954243856, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.040131241954243856 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 
+ }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849738, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849738 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641086, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641086 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0242785680243077, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0242785680243077 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43641618497109824, + "acc_stderr": 0.026700545424943677, + "acc_norm": 0.43641618497109824, + "acc_norm_stderr": 0.026700545424943677 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.36728395061728397, + "acc_stderr": 0.026822801759507887, + "acc_norm": 0.36728395061728397, + "acc_norm_stderr": 0.026822801759507887 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.035415085788840193, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 0.035415085788840193 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41834862385321103, + "acc_stderr": 0.02114954859644388, + "acc_norm": 0.41834862385321103, + "acc_norm_stderr": 0.02114954859644388 + 
}, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.02807415894760066, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.018249024411207668, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.018249024411207668 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467764, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467764 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293647, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293647 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.02902942281568141, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.02902942281568141 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.03078154910202622, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.03078154910202622 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28748370273794005, + "acc_stderr": 0.011559337355708505, + "acc_norm": 0.28748370273794005, + "acc_norm_stderr": 0.011559337355708505 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.037818873532059816, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.037818873532059816 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589685, + "mc2": 0.4639200463938291, + "mc2_stderr": 0.015440957243862982 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.3612750885478158, + "acc_stderr": 0.016515463022412, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.017014038119297473 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-llama2-13b-finetune-ex", + "model_sha": "f1dcbe9a1ff2ea479a2094f5058226f796341bfd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-llama2-13b-finetune/result_2023-11-01 09:20:47.json b/cepiloth/ko-llama2-13b-finetune/result_2023-11-01 09:20:47.json new file mode 100644 index 0000000000000000000000000000000000000000..5b16010eff13931b9c3f1a1447fdbf0f52e10749 --- /dev/null +++ b/cepiloth/ko-llama2-13b-finetune/result_2023-11-01 09:20:47.json @@ -0,0 +1,444 
@@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.01359243151906808, + "acc_norm": 0.38310580204778155, + "acc_norm_stderr": 0.014206472661672876 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3536148177653854, + "acc_stderr": 0.004771143074426131, + "acc_norm": 0.45359490141406095, + "acc_norm_stderr": 0.004968244611429387 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.017784034534992454, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.017784034534992454 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.039154506304142495, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.039154506304142495 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.035594435655639196, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.035594435655639196 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236153, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236153 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 
0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33004926108374383, + "acc_stderr": 0.03308530426228258, + "acc_norm": 0.33004926108374383, + "acc_norm_stderr": 0.03308530426228258 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165894, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165894 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5641025641025641, + "acc_stderr": 0.032485775115784, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.032485775115784 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3849056603773585, + "acc_stderr": 0.029946498567699945, + "acc_norm": 0.3849056603773585, + "acc_norm_stderr": 0.029946498567699945 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.0472457740573157, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.0472457740573157 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3959537572254335, + "acc_stderr": 0.026329813341946253, + "acc_norm": 0.3959537572254335, + "acc_norm_stderr": 0.026329813341946253 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3549382716049383, + "acc_stderr": 0.02662415247884585, + "acc_norm": 0.3549382716049383, + "acc_norm_stderr": 0.02662415247884585 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008586, + "acc_norm": 
0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42201834862385323, + "acc_stderr": 0.021174991407763178, + "acc_norm": 0.42201834862385323, + "acc_norm_stderr": 0.021174991407763178 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.018433427649401896, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.018433427649401896 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902002, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902002 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.0279715413701706, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.0279715413701706 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.03119223072679566, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.03119223072679566 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.03078154910202622, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.03078154910202622 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27249022164276404, + "acc_stderr": 0.011371658294311532, + "acc_norm": 0.27249022164276404, + "acc_norm_stderr": 0.011371658294311532 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03308611113236434, + 
"acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03308611113236434 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512566, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512566 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.46645373213159264, + "mc2_stderr": 0.015378490920195349 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33884297520661155, + "acc_stderr": 0.016272952997019124, + "acc_norm": 0.4014167650531287, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-llama2-13b-finetune", + "model_sha": "15f8932879b2e7880baf3402b1a150f9ff36d370", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + 
"num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-llama2-finetune-ex2/result_2023-10-26 09:22:05.json b/cepiloth/ko-llama2-finetune-ex2/result_2023-10-26 09:22:05.json new file mode 100644 index 0000000000000000000000000000000000000000..8cdc9afaa6a3b72af8afc8c188a6a425551c22d3 --- /dev/null +++ b/cepiloth/ko-llama2-finetune-ex2/result_2023-10-26 09:22:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2764505119453925, + "acc_stderr": 0.013069662474252425, + "acc_norm": 0.3216723549488055, + "acc_norm_stderr": 0.013650488084494162 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3296156144194384, + "acc_stderr": 0.004691128722535481, + "acc_norm": 0.4091814379605656, + "acc_norm_stderr": 0.004906779523192671 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39080459770114945, + "acc_stderr": 0.01744836606706253, + "acc_norm": 0.39080459770114945, + "acc_norm_stderr": 0.01744836606706253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501117, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501117 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.031068985963122145, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.031068985963122145 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.37942122186495175, + "acc_stderr": 0.02755994980234782, + "acc_norm": 0.37942122186495175, + "acc_norm_stderr": 0.02755994980234782 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836556, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836556 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.038552896163789464, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.038552896163789464 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.03104194130405927, + "acc_norm": 
0.35294117647058826, + "acc_norm_stderr": 0.03104194130405927 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204426, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204426 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970187, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970187 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.027379871229943245, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.027379871229943245 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4700854700854701, + "acc_stderr": 0.03269741106812443, + "acc_norm": 0.4700854700854701, + "acc_norm_stderr": 0.03269741106812443 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.02881561571343211, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.02881561571343211 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683522, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683522 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43283582089552236, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.43283582089552236, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.025624723994030457, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.025624723994030457 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 
0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02712511551316686, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02712511551316686 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089117, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089117 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30275229357798167, + "acc_stderr": 0.019698711434756357, + "acc_norm": 0.30275229357798167, + "acc_norm_stderr": 0.019698711434756357 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.027184498909941613, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.027184498909941613 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.036906779861372814, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.036906779861372814 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275915, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275915 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984302, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984302 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536027, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536027 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261446, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261446 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.02533684856333236, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.02533684856333236 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, 
+ "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.031052391937584353, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.031052391937584353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25945241199478486, + "acc_stderr": 0.011195262076350314, + "acc_norm": 0.25945241199478486, + "acc_norm_stderr": 0.011195262076350314 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.03228210387037892, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.03228210387037892 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4547120708605401, + "mc2_stderr": 0.015426627135169792 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2632821723730815, + "acc_stderr": 0.015141752199573205, + "acc_norm": 0.3530106257378985, + "acc_norm_stderr": 0.016430745982427126 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-llama2-finetune-ex2", + "model_sha": "ab3114ee91616a692eee5bfa8e238f6f821e89b8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-llama2-finetune-ex3/result_2023-10-31 06:39:25.json b/cepiloth/ko-llama2-finetune-ex3/result_2023-10-31 06:39:25.json new file mode 100644 index 0000000000000000000000000000000000000000..3f686bf802830bf8fd6db64c7835200463e5c110 --- /dev/null +++ b/cepiloth/ko-llama2-finetune-ex3/result_2023-10-31 06:39:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2764505119453925, + "acc_stderr": 0.013069662474252428, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785562 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33031268671579367, + "acc_stderr": 0.004693644357202052, + "acc_norm": 0.41147181836287594, + "acc_norm_stderr": 0.004910946424771612 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36015325670498083, + "acc_stderr": 0.017166362471369306, + "acc_norm": 0.36015325670498083, + "acc_norm_stderr": 0.017166362471369306 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745667, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745667 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553026, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553026 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788513, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788513 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165085, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165085 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 
0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003337, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003337 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342853, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342853 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.021444547301560465, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.021444547301560465 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022895, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022895 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4017094017094017, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.4017094017094017, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2830188679245283, + "acc_stderr": 0.027724236492700897, + "acc_norm": 0.2830188679245283, + "acc_norm_stderr": 0.027724236492700897 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39800995024875624, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.39800995024875624, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.03533133389323657, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.03533133389323657 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415426, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415426 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554857, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554857 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 
0.0446196043338474 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.02494679222527231, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.02494679222527231 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.02563082497562135, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.02563082497562135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.03027690994517826, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.03027690994517826 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518752, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518752 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26422018348623855, + "acc_stderr": 0.018904164171510213, + "acc_norm": 0.26422018348623855, + "acc_norm_stderr": 0.018904164171510213 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523811, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523811 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.026415601914389, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.026415601914389 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4380165289256198, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.0387813988879761, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.0387813988879761 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275915, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275915 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.037709700493470166, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.037709700493470166 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1875, + "acc_stderr": 0.023709788253811766, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.023709788253811766 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.03078905113903081, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.03078905113903081 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803548, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803548 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145628, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145628 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391244, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391244 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237035, + "mc2": 0.4373029262876568, + "mc2_stderr": 0.015588306319483176 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2632821723730815, + "acc_stderr": 0.015141752199573201, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.016272952997019124 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-llama2-finetune-ex3", + "model_sha": "013b64f9d7f8155d95fedc7a859df06ae0c4fce9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-llama2-finetune-ex4/result_2023-10-31 03:26:06.json b/cepiloth/ko-llama2-finetune-ex4/result_2023-10-31 03:26:06.json new file mode 100644 index 0000000000000000000000000000000000000000..0ddcc3285a4d493c456c2978d7c87d3b44031ebd --- /dev/null +++ b/cepiloth/ko-llama2-finetune-ex4/result_2023-10-31 03:26:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.013057169655761836, + "acc_norm": 0.310580204778157, + "acc_norm_stderr": 0.013522292098053057 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3202549292969528, + "acc_stderr": 0.004656208951541443, + "acc_norm": 0.37582154949213303, + "acc_norm_stderr": 0.004833444556338622 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.03743979825926399, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.03743979825926399 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3001277139208174, + "acc_stderr": 0.016389249691317425, + "acc_norm": 0.3001277139208174, + "acc_norm_stderr": 0.016389249691317425 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.028020226271200217, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.028020226271200217 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.0332939411907353, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.0332939411907353 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.025403832978179604, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.025403832978179604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.24663677130044842, + "acc_stderr": 0.028930413120910874, + "acc_norm": 0.24663677130044842, + "acc_norm_stderr": 0.028930413120910874 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.038552896163789485, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.038552896163789485 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.02894200404099817, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.02894200404099817 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.02160629449464773, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.02160629449464773 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.04373313040914761, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.04373313040914761 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144444, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144444 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594525, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594525 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.33760683760683763, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.33760683760683763, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.027134291628741706, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.027134291628741706 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02606715922227579, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02606715922227579 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594316, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594316 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.02167921966369317, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.02167921966369317 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.024257901705323378, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.024257901705323378 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.024383665531035454, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.024383665531035454 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27155963302752295, + "acc_stderr": 0.019069098363191452, + "acc_norm": 0.27155963302752295, + "acc_norm_stderr": 0.019069098363191452 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.02600480036395211, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.02600480036395211 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228732, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228732 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25326797385620914, + "acc_stderr": 0.017593486895366835, + "acc_norm": 0.25326797385620914, + "acc_norm_stderr": 0.017593486895366835 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.02769691071309394, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.02769691071309394 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983566, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983566 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + "acc_stderr": 0.030299506562154178, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 0.030299506562154178 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.19831223628691982, + "acc_stderr": 0.025955020841621112, + "acc_norm": 0.19831223628691982, + "acc_norm_stderr": 0.025955020841621112 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2620599739243807, + "acc_stderr": 0.011231552795890392, + "acc_norm": 0.2620599739243807, + "acc_norm_stderr": 0.011231552795890392 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009181, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834564, + "mc2": 0.42970330311039423, + "mc2_stderr": 0.01625558814144742 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2514757969303424, + "acc_stderr": 0.014916462437232256, + "acc_norm": 0.29043683589138136, + "acc_norm_stderr": 0.01560760256981463 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-llama2-finetune-ex4", + "model_sha": "c368a2162df72c2310144879432d508736a16e90", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-llama2-finetune-ex5/result_2023-10-31 11:01:48.json b/cepiloth/ko-llama2-finetune-ex5/result_2023-10-31 11:01:48.json new file mode 100644 index 0000000000000000000000000000000000000000..d0e09dd576e3c40a9ecb90db174f044698f83b9f --- /dev/null +++ b/cepiloth/ko-llama2-finetune-ex5/result_2023-10-31 11:01:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2636518771331058, + "acc_stderr": 0.012875929151297065, + "acc_norm": 0.3122866894197952, + "acc_norm_stderr": 0.013542598541688065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33100975901214896, + "acc_stderr": 0.004696148339570981, + "acc_norm": 0.4099780920135431, + "acc_norm_stderr": 0.004908241354310212 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36398467432950193, + "acc_stderr": 0.017205684809032232, + "acc_norm": 0.36398467432950193, + "acc_norm_stderr": 0.017205684809032232 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071857, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071857 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36012861736334406, + "acc_stderr": 0.02726429759980402, + "acc_norm": 0.36012861736334406, + "acc_norm_stderr": 0.02726429759980402 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.04243869242230524, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.04243869242230524 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424387, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424387 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3319327731092437, + "acc_stderr": 0.030588697013783663, + "acc_norm": 0.3319327731092437, + "acc_norm_stderr": 0.030588697013783663 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2794871794871795, + "acc_stderr": 0.022752388839776823, + "acc_norm": 0.2794871794871795, + "acc_norm_stderr": 0.022752388839776823 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3419354838709677, + "acc_stderr": 0.02698528957655273, + "acc_norm": 0.3419354838709677, + "acc_norm_stderr": 0.02698528957655273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.44017094017094016, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.44017094017094016, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.028254200344438672, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.028254200344438672 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094528, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094528 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4626865671641791, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.4626865671641791, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918428, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918428 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.025624723994030454, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.025624723994030454 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.02640614597362568, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.02640614597362568 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32124352331606215, + "acc_stderr": 0.03369950868549068, + "acc_norm": 0.32124352331606215, + "acc_norm_stderr": 0.03369950868549068 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3211009174311927, + "acc_stderr": 0.020018149772733744, + "acc_norm": 0.3211009174311927, + "acc_norm_stderr": 0.020018149772733744 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.027184498909941613, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.027184498909941613 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2908496732026144, + "acc_stderr": 
0.018373116915903966, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.018373116915903966 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467764, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467764 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.025187786660227248, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.025187786660227248 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31223628691983124, + "acc_stderr": 0.030165137867847, + "acc_norm": 0.31223628691983124, + "acc_norm_stderr": 0.030165137867847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25097783572359844, + "acc_stderr": 0.011073730299187224, + "acc_norm": 0.25097783572359844, + "acc_norm_stderr": 0.011073730299187224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268049, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268049 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.0158663464013843, + "mc2": 0.4504635842487325, + "mc2_stderr": 0.01536359300418303 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27036599763872493, + "acc_stderr": 0.015270152942068406, + "acc_norm": 0.35182998819362454, + "acc_norm_stderr": 0.016418206451218057 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-llama2-finetune-ex5", + "model_sha": "72d3e9fcbf33373b484f2beb26751ac0bf06af65", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chargoddard/Yi-6B-Llama/result_2023-11-21 00:54:55.json b/chargoddard/Yi-6B-Llama/result_2023-11-21 00:54:55.json new file mode 100644 index 0000000000000000000000000000000000000000..7c6735232d17805f6385a5d96ff8364b5575254d --- /dev/null +++ b/chargoddard/Yi-6B-Llama/result_2023-11-21 00:54:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2175767918088737, + "acc_stderr": 0.012057262020972502, + "acc_norm": 0.2627986348122867, + "acc_norm_stderr": 0.012862523175351331 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3028281218880701, + "acc_stderr": 0.004585424513012102, + "acc_norm": 0.35082652857996416, + "acc_norm_stderr": 0.004762534245488401 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39208173690932313, + "acc_stderr": 0.017458524050147636, + "acc_norm": 0.39208173690932313, + "acc_norm_stderr": 0.017458524050147636 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.031565646822367836, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.031565646822367836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.03526552724601199, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.03526552724601199 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34102564102564104, + "acc_stderr": 0.02403548967633507, + "acc_norm": 0.34102564102564104, + "acc_norm_stderr": 0.02403548967633507 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36129032258064514, + "acc_stderr": 0.02732754844795754, + "acc_norm": 0.36129032258064514, + "acc_norm_stderr": 0.02732754844795754 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3660377358490566, + "acc_stderr": 0.029647813539365263, + "acc_norm": 0.3660377358490566, + 
"acc_norm_stderr": 0.029647813539365263 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.04653429807913508, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.04653429807913508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871937, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871937 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.02475747390275205, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.02475747390275205 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.03814269893261837, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.03814269893261837 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.027237415094592474, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.027237415094592474 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3724770642201835, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.3724770642201835, + "acc_norm_stderr": 0.020728368457638494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": 
{ + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.018850084696468712, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.018850084696468712 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.029049190342543465, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.029049190342543465 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098426, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098426 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841196, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.030964810588786713, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.030964810588786713 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897634, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897634 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.01607750926613303, + "mc2": 0.4750668989915785, + "mc2_stderr": 0.015774112289507786 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3270365997638725, + "acc_stderr": 0.016129047485457022, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.016819438642971408 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chargoddard/Yi-6B-Llama", + "model_sha": "282ad3e8502e1830c466dd75601af816a43b8bcf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/choco9966/Llama-2-7b-instruct-tuning/result_2023-10-19 08:44:42.json b/choco9966/Llama-2-7b-instruct-tuning/result_2023-10-19 08:44:42.json new file mode 100644 index 0000000000000000000000000000000000000000..76e515b84b20c9180028f7029ba9af5f00570cdb --- /dev/null +++ b/choco9966/Llama-2-7b-instruct-tuning/result_2023-10-19 08:44:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29692832764505117, + "acc_stderr": 0.013352025976725222, + "acc_norm": 0.33447098976109213, + "acc_norm_stderr": 0.01378746032244138 + }, + "harness|ko_hellaswag|10": { + "acc": 0.345947022505477, + "acc_stderr": 0.004747038768172532, + "acc_norm": 0.4251145190201155, + "acc_norm_stderr": 0.004933500261683597 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 
0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3895274584929757, + "acc_stderr": 0.017438082556264594, + "acc_norm": 0.3895274584929757, + "acc_norm_stderr": 0.017438082556264594 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771124, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771124 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.03148955829745529, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.03148955829745529 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.03550920185689629, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.03550920185689629 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.02764814959975147, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.02764814959975147 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.040287315329475604, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.040287315329475604 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.030684737115135377, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.030684737115135377 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.023454674889404288, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.023454674889404288 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.047128212574267705, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.047128212574267705 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.0314471258167824, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.0314471258167824 + }, + "harness|ko_mmlu_high_school_biology|5": { + 
"acc": 0.3580645161290323, + "acc_stderr": 0.027273890594300642, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.027273890594300642 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.032745319388423504, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.032745319388423504 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176095, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176095 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48258706467661694, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.48258706467661694, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.0236369759961018, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.0236369759961018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3439306358381503, + "acc_stderr": 0.025574123786546648, + "acc_norm": 0.3439306358381503, + "acc_norm_stderr": 0.025574123786546648 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.027044538138402616, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.027044538138402616 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3412844036697248, + "acc_stderr": 0.020328612816592432, + "acc_norm": 0.3412844036697248, + "acc_norm_stderr": 0.020328612816592432 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013315, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013315 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.018433427649401896, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.018433427649401896 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460987, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460987 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.01444415780826145, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.01444415780826145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.028064998167040094, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.028064998167040094 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35443037974683544, + "acc_stderr": 0.031137304297185798, + "acc_norm": 0.35443037974683544, + "acc_norm_stderr": 0.031137304297185798 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2627118644067797, + "acc_stderr": 0.011240545514995669, + "acc_norm": 0.2627118644067797, + "acc_norm_stderr": 0.011240545514995669 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.032702871814820816, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.032702871814820816 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33047735618115054, + "mc1_stderr": 0.016466769613698293, + "mc2": 0.5139753799906011, + "mc2_stderr": 0.016082624616035393 + }, + "harness|ko_commongen_v2|2": { + 
"acc": 0.2585596221959858, + "acc_stderr": 0.015053354438964, + "acc_norm": 0.29161747343565525, + "acc_norm_stderr": 0.01562627669007024 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "choco9966/Llama-2-7b-instruct-tuning", + "model_sha": "0914768714fca5e74eef736b357d9f82ccc9e089", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/daekeun-ml/Llama-2-ko-DPO-13B/result_2023-10-31 08:54:20.json b/daekeun-ml/Llama-2-ko-DPO-13B/result_2023-10-31 08:54:20.json new file mode 100644 index 0000000000000000000000000000000000000000..f409b1bb657917939713fb28400224191158c06c --- /dev/null +++ b/daekeun-ml/Llama-2-ko-DPO-13B/result_2023-10-31 08:54:20.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.43600682593856654, + "acc_stderr": 0.014491225699230916, + "acc_norm": 0.47525597269624575, + "acc_norm_stderr": 0.01459348769493774 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4387572196773551, + "acc_stderr": 0.004952209831856584, + "acc_norm": 0.5827524397530373, + "acc_norm_stderr": 0.004920967192255291 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865636, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865636 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101736, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101736 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126167, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + 
"acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.03125610824421881, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.03125610824421881 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.037507570448955384, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.037507570448955384 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02306818884826111, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02306818884826111 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 
0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5779816513761468, + "acc_stderr": 0.021174991407763178, + "acc_norm": 0.5779816513761468, + "acc_norm_stderr": 0.021174991407763178 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147127, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147127 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.027732834353363954, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.027732834353363954 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402543, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402543 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.03093285879278984, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.03093285879278984 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353593, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353593 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + 
"acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070264, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070264 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3684210526315789, + "mc1_stderr": 0.016886551261046046, + "mc2": 0.5190921371587374, + "mc2_stderr": 0.015978390538660552 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4734356552538371, + "acc_stderr": 0.017166075717577747, + "acc_norm": 0.538370720188902, + "acc_norm_stderr": 0.017139660221845557 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "daekeun-ml/Llama-2-ko-DPO-13B", + "model_sha": "dba5dd11263b1b42fa7d904d627f41d47330317b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + 
"num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/daekeun-ml/Llama-2-ko-OpenOrca-gugugo-13B/result_2023-11-14 01:13:56.json b/daekeun-ml/Llama-2-ko-OpenOrca-gugugo-13B/result_2023-11-14 01:13:56.json new file mode 100644 index 0000000000000000000000000000000000000000..f2fbe567f38ed549276f17ed20adf9249800886c --- /dev/null +++ b/daekeun-ml/Llama-2-ko-OpenOrca-gugugo-13B/result_2023-11-14 01:13:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3370307167235495, + "acc_stderr": 0.013813476652902276, + "acc_norm": 0.3967576791808874, + "acc_norm_stderr": 0.014296513020180637 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3999203345947023, + "acc_stderr": 0.0048888050031030755, + "acc_norm": 0.5243975303724357, + "acc_norm_stderr": 0.004983837641502893 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.049486373240266356, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.049486373240266356 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5134099616858238, + "acc_stderr": 0.017873531736510365, + "acc_norm": 0.5134099616858238, + "acc_norm_stderr": 0.017873531736510365 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.028150232244535594, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.028150232244535594 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03547601494006936, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03547601494006936 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 
0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.02483881198803316, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02483881198803316 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.03222414045241107, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.03222414045241107 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.03056159042673183, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261107, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261107 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.0357795481394837, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.0357795481394837 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5174311926605505, + "acc_stderr": 0.021424291871853157, + "acc_norm": 0.5174311926605505, + "acc_norm_stderr": 0.021424291871853157 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.019249785691717217, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.019249785691717217 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.031546962856566295, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.031546962856566295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.03141470802586589, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.03141470802586589 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.03195514741370671, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.03195514741370671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214941, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214941 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015008, + "mc2": 0.4200527342817689, + "mc2_stderr": 0.01514912154156884 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36717827626918537, + "acc_stderr": 0.016572727807458595, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.017182864434998574 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "daekeun-ml/Llama-2-ko-OpenOrca-gugugo-13B", + "model_sha": "4a8383dc00731b8d09cec6d4f48eba631833b445", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/daekeun-ml/Llama-2-ko-instruct-13B/result_2023-10-29 16:17:31.json b/daekeun-ml/Llama-2-ko-instruct-13B/result_2023-10-29 16:17:31.json new file mode 100644 index 0000000000000000000000000000000000000000..f741ff23c1386332442b4778e58e6c4945c6a1b9 --- /dev/null +++ b/daekeun-ml/Llama-2-ko-instruct-13B/result_2023-10-29 16:17:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3993174061433447, + "acc_stderr": 0.014312094557946704, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.01457558392201967 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4229237203744274, + "acc_stderr": 0.004930138842768219, + "acc_norm": 0.5690101573391755, + "acc_norm_stderr": 0.004942026200279584 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.017747874245683602, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.017747874245683602 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 
0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.031660988918880785, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.031660988918880785 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 
+ }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5743119266055046, + "acc_stderr": 0.021199235972470795, + "acc_norm": 0.5743119266055046, + "acc_norm_stderr": 0.021199235972470795 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.02811092849280908, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.02811092849280908 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.01969145905235416, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.01969145905235416 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534792, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534792 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + 
"acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.03018753206032938, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.03018753206032938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3474576271186441, + "acc_stderr": 0.012161417729749806, + "acc_norm": 0.3474576271186441, + "acc_norm_stderr": 0.012161417729749806 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630573, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283344, + "mc2": 0.4199929776899167, + "mc2_stderr": 0.014679195459056854 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.017188329219654276, + "acc_norm": 0.5844155844155844, + "acc_norm_stderr": 0.016943586313076565 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "daekeun-ml/Llama-2-ko-instruct-13B", + "model_sha": "a29fb540227b3cbc88a308cc5ed62c26b28d84f1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dasomysky/unv_v0.1.0/result_2023-12-19 10:08:30.json b/dasomysky/unv_v0.1.0/result_2023-12-19 10:08:30.json new file mode 100644 index 0000000000000000000000000000000000000000..c2fe2afb76080ca8f541da5ff3fc49e6a39df854 --- /dev/null +++ b/dasomysky/unv_v0.1.0/result_2023-12-19 10:08:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4104095563139932, + "acc_stderr": 0.014374922192642667, + "acc_norm": 0.4735494880546075, + "acc_norm_stderr": 0.014590931358120179 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3721370244971121, + "acc_stderr": 0.0048238677613324675, + "acc_norm": 0.4676359290977893, + "acc_norm_stderr": 0.004979317515432532 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693353, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236785, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236785 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079021, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079021 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894262, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894262 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 
0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.03515520728670417, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.03515520728670417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686856, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686856 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.03057281131029961, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.03057281131029961 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37358490566037733, + "acc_stderr": 0.029773082713319875, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.029773082713319875 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857406, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 
0.024870815251057093, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.024870815251057093 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.02758600622160771, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.02758600622160771 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583703, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583703 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 0.021436420955529428, + "acc_norm": 0.4954128440366973, + "acc_norm_stderr": 0.021436420955529428 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849725, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.019780465954777535, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.019780465954777535 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.02878222756134725, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.02878222756134725 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.045723723587374296, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.045723723587374296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.032847388576472056, + "acc_norm": 
0.36574074074074076, + "acc_norm_stderr": 0.032847388576472056 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2, + "acc_stderr": 0.013378001241813053, + "acc_norm": 0.2, + "acc_norm_stderr": 0.013378001241813053 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841195, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163906, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3389830508474576, + "acc_stderr": 0.01208994185758447, + "acc_norm": 0.3389830508474576, + "acc_norm_stderr": 0.01208994185758447 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398395, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398395 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.01613222972815506, + "mc2": 0.4566409454989933, + "mc2_stderr": 0.016796069345486716 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.37662337662337664, + "acc_stderr": 0.016658799874051975, + "acc_norm": 0.3919716646989374, + "acc_norm_stderr": 0.016784332119424077 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dasomysky/unv_v0.1.0", + "model_sha": "5c1f11f93821e38bbb9245a2f6713e0fd421edf3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dasomysky/unv_v0.1.2/result_2023-12-28 04:44:41.json b/dasomysky/unv_v0.1.2/result_2023-12-28 04:44:41.json new file mode 100644 index 0000000000000000000000000000000000000000..2f011b5fe9d1a71004d267dc51bb1dc375935182 --- /dev/null +++ b/dasomysky/unv_v0.1.2/result_2023-12-28 04:44:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.181740614334471, + "acc_stderr": 0.011269198948880236, + "acc_norm": 0.2431740614334471, + "acc_norm_stderr": 0.012536554144587096 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2529376618203545, + "acc_stderr": 0.004338071318912315, + "acc_norm": 0.25184226249751046, + "acc_norm_stderr": 0.004331840012787854 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.03158149539338734, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.03158149539338734 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.03047297336338005, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.03047297336338005 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.02540383297817961, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.02540383297817961 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.038808483010823944, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.038808483010823944 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20202020202020202, + "acc_stderr": 0.028606204289229872, + "acc_norm": 0.20202020202020202, + "acc_norm_stderr": 0.028606204289229872 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.02720537153827948, + "acc_norm": 0.226890756302521, + "acc_norm_stderr": 0.02720537153827948 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24516129032258063, + "acc_stderr": 0.024472243840895514, + "acc_norm": 0.24516129032258063, + "acc_norm_stderr": 0.024472243840895514 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.028120966503914404, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.028120966503914404 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.02661648298050171, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.02661648298050171 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473836, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473836 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.029705284056772426, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772426 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20207253886010362, + "acc_stderr": 0.02897908979429673, + "acc_norm": 0.20207253886010362, + "acc_norm_stderr": 0.02897908979429673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.0182240781172991, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.0182240781172991 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.040261875275912046, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.040261875275912046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.017776947157528034, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.017776947157528034 + }, + "harness|ko_mmlu_professional_accounting|5": { + 
"acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642973, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642973 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1574074074074074, + "acc_stderr": 0.024837173518242384, + "acc_norm": 0.1574074074074074, + "acc_norm_stderr": 0.024837173518242384 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859936, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859936 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19117647058823528, + "acc_stderr": 0.023886881922440335, + "acc_norm": 0.19117647058823528, + "acc_norm_stderr": 0.023886881922440335 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.16326530612244897, + "acc_stderr": 0.023661699177098604, + "acc_norm": 0.16326530612244897, + "acc_norm_stderr": 0.023661699177098604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24119947848761408, + "acc_stderr": 0.010926496102034954, + "acc_norm": 0.24119947848761408, + "acc_norm_stderr": 0.010926496102034954 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2107843137254902, + "acc_stderr": 0.028626547912437406, + "acc_norm": 0.2107843137254902, + "acc_norm_stderr": 0.028626547912437406 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752325, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09327036599763873, + "acc_stderr": 0.00999828619027671, + "acc_norm": 0.31286894923258557, + "acc_norm_stderr": 0.015941010118302658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dasomysky/unv_v0.1.2", + "model_sha": "0362c3851124321261564d6aa05b1e0b647d63c4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/komt-mistral-7b-v1/result_2023-11-02 05:58:27.json b/davidkim205/komt-mistral-7b-v1/result_2023-11-02 05:58:27.json new file mode 100644 index 0000000000000000000000000000000000000000..5e659ed65bc346ebeba62fca8a3ebc5b8db43dcf --- /dev/null +++ b/davidkim205/komt-mistral-7b-v1/result_2023-11-02 05:58:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3216723549488055, + "acc_stderr": 0.013650488084494164, + "acc_norm": 0.3660409556313993, + "acc_norm_stderr": 0.014077223108470142 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3586934873531169, + "acc_stderr": 0.004786368011500455, + "acc_norm": 0.46016729735112527, + "acc_norm_stderr": 0.004973922192982241 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.017818248603465564, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465564 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745633, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745633 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085328, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085328 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.03169380235712997, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.03169380235712997 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3871794871794872, + "acc_stderr": 0.024697216930878948, + "acc_norm": 0.3871794871794872, + "acc_norm_stderr": 0.024697216930878948 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.02786932057166464, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02786932057166464 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006114, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006114 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 
0.04769300568972742, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972742 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176095, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176095 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123936, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123936 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247079, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247079 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047732, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047732 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357334, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261743, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261743 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02712511551316686, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02712511551316686 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43302752293577984, + "acc_stderr": 0.021244146569074338, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.021244146569074338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883034, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883034 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 
0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.018950886770806297, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.018950886770806297 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169924, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169924 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2916201117318436, + "acc_stderr": 0.015201032512520429, + "acc_norm": 0.2916201117318436, + "acc_norm_stderr": 0.015201032512520429 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.028064998167040094, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.028064998167040094 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.03254462010767859, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.03254462010767859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2848761408083442, + "acc_stderr": 0.01152783084636902, + "acc_norm": 0.2848761408083442, + "acc_norm_stderr": 0.01152783084636902 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.03228210387037892, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.03228210387037892 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391243, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391243 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589667, + "mc2": 0.47070833796075856, + "mc2_stderr": 0.015435009084049225 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41086186540731995, + "acc_stderr": 0.016914972767841062, + "acc_norm": 0.48288075560802834, + "acc_norm_stderr": 0.017180275246085622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/komt-mistral-7b-v1", + "model_sha": "feb41a27b8dafcc6912185ff9d8da66951ca5758", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/ku-mistral-7b-PGO-v1/result_2023-11-08 02:18:33.json b/devhyun88/ku-mistral-7b-PGO-v1/result_2023-11-08 02:18:33.json new file mode 100644 index 0000000000000000000000000000000000000000..e9e4a8d3865d998fb55ac5d5348c36daa3207cc4 --- /dev/null +++ b/devhyun88/ku-mistral-7b-PGO-v1/result_2023-11-08 02:18:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40187713310580203, + "acc_stderr": 0.014327268614578276, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.014537144444284741 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40748854809798846, + "acc_stderr": 0.004903628887264535, + "acc_norm": 0.5290778729336786, + "acc_norm_stderr": 0.004981336318033644 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, 
+ "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5363984674329502, + "acc_stderr": 0.017832524079593258, + "acc_norm": 0.5363984674329502, + "acc_norm_stderr": 0.017832524079593258 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863537, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863537 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883231, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883231 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + 
"acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942645, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404904, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.025279850397404904 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539284, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539284 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 
0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.02861462475280544, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.02861462475280544 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.019886221037501862, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.019886221037501862 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251458, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.01467625200931947, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.01467625200931947 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.01223861575031651, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.01223861575031651 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394812, + "mc2": 0.45493566764145105, + "mc2_stderr": 0.01570789472718274 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41912632821723733, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.4427390791027155, + "acc_norm_stderr": 
0.01707725413155622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/ku-mistral-7b-PGO-v1", + "model_sha": "a4f1f7057b91704e9a3328beb2f95ff460339b53", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/ku-mistral-7b-PGO-v2/result_2023-11-13 01:55:37.json b/devhyun88/ku-mistral-7b-PGO-v2/result_2023-11-13 01:55:37.json new file mode 100644 index 0000000000000000000000000000000000000000..833f2c16848210c64e4075aa24038ebd30b3fae7 --- /dev/null +++ b/devhyun88/ku-mistral-7b-PGO-v2/result_2023-11-13 01:55:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39078498293515357, + "acc_stderr": 0.014258563880513782, + "acc_norm": 0.43600682593856654, 
+ "acc_norm_stderr": 0.014491225699230916 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40460067715594505, + "acc_stderr": 0.004898115110975032, + "acc_norm": 0.5252937661820355, + "acc_norm_stderr": 0.004983392650570965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.524904214559387, + "acc_stderr": 0.017857770704901035, + "acc_norm": 0.524904214559387, + "acc_norm_stderr": 0.017857770704901035 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562804, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562804 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + 
"acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851105, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851105 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 
0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5247706422018349, + "acc_stderr": 0.021410999753635914, + "acc_norm": 0.5247706422018349, + "acc_norm_stderr": 0.021410999753635914 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.043758884927270605, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.043758884927270605 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.0199221156827867, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.0199221156827867 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3340782122905028, + "acc_stderr": 0.015774911422381622, + "acc_norm": 0.3340782122905028, + "acc_norm_stderr": 0.015774911422381622 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125468, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125468 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353595, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353595 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 
0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.4557831168873756, + "mc2_stderr": 0.01553351013285851 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42384887839433294, + "acc_stderr": 0.01698981083462826, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.017161563949916345 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/ku-mistral-7b-PGO-v2", + "model_sha": "c60fc585fc6621d322bb309a3f6d3763c2409fa6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/ku-mistral-7b-PGO-v3/result_2023-11-20 02:12:33.json 
b/devhyun88/ku-mistral-7b-PGO-v3/result_2023-11-20 02:12:33.json new file mode 100644 index 0000000000000000000000000000000000000000..8ce4694c1ff90649fa0c03efb2eea91e6d99749f --- /dev/null +++ b/devhyun88/ku-mistral-7b-PGO-v3/result_2023-11-20 02:12:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955, + "acc_norm": 0.4087030716723549, + "acc_norm_stderr": 0.014365750345427006 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3736307508464449, + "acc_stderr": 0.004827786289074851, + "acc_norm": 0.47769368651663013, + "acc_norm_stderr": 0.004984813391016205 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4112388250319285, + "acc_stderr": 0.017595971908056576, + "acc_norm": 0.4112388250319285, + "acc_norm_stderr": 0.017595971908056576 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894245, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894245 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.40404040404040403, + "acc_stderr": 0.03496130972056129, + "acc_norm": 0.40404040404040403, + "acc_norm_stderr": 0.03496130972056129 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.02466674491518724, + 
"acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.02466674491518724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536824, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536824 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.028100964724272638, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.028100964724272638 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.02898545565233439, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.02898545565233439 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762602, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554859, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554859 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.40173410404624277, + "acc_stderr": 0.026394104177643627, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.026394104177643627 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379424, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 
0.027513747284379424 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322004, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322004 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3908256880733945, + "acc_stderr": 0.020920058346111065, + "acc_norm": 0.3908256880733945, + "acc_norm_stderr": 0.020920058346111065 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617156, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617156 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04545454545454546, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04545454545454546 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.01955964680921593, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.01955964680921593 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553998, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553998 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.0279715413701706, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.0279715413701706 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 
0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32333767926988266, + "acc_stderr": 0.011946565758447202, + "acc_norm": 0.32333767926988266, + "acc_norm_stderr": 0.011946565758447202 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.034267123492472705, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.034267123492472705 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.4157388974059479, + "mc2_stderr": 0.015224506818663186 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33766233766233766, + "acc_stderr": 0.016259075784754953, + "acc_norm": 0.3860684769775679, + "acc_norm_stderr": 0.016738130760321747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/ku-mistral-7b-PGO-v3", + "model_sha": "2def3ad0a50695d8b48b9b14350b35f8650a81e5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/ku-mistral-7b-PGO-v4/result_2023-11-24 00:42:44.json b/devhyun88/ku-mistral-7b-PGO-v4/result_2023-11-24 00:42:44.json new file mode 100644 index 0000000000000000000000000000000000000000..08a25fd8f3c2c91853b939db358ba67a6cac1993 --- /dev/null +++ b/devhyun88/ku-mistral-7b-PGO-v4/result_2023-11-24 00:42:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40102389078498296, + "acc_stderr": 0.014322255790719872, + "acc_norm": 0.44283276450511944, + "acc_norm_stderr": 0.014515573873348913 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39533957379008167, + "acc_stderr": 0.004879242848473461, + "acc_norm": 0.5145389364668392, + "acc_norm_stderr": 0.004987671478640939 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066165, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066165 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840674, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840674 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 
0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461224, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461224 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568385, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009812, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009812 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712177, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712177 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159664, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137282, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137282 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46422018348623856, + "acc_stderr": 0.0213823647757019, + "acc_norm": 0.46422018348623856, + "acc_norm_stderr": 0.0213823647757019 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611327, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611327 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767867, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767867 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.33088235294117646, + "acc_stderr": 0.02858270975389844, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.02858270975389844 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3494132985658409, + "acc_stderr": 0.012177306252786683, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786683 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777298, + "mc2": 0.43552694859151936, + "mc2_stderr": 0.015531533195686252 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38961038961038963, + "acc_stderr": 0.016766161671893504, + "acc_norm": 0.43683589138134593, + "acc_norm_stderr": 0.017052633559856076 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/ku-mistral-7b-PGO-v4", + "model_sha": "e10de879bef89b759447acd6910fab94dc89f750", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/ku-mistral-7b-PGO-v5/result_2023-12-04 06:12:17.json b/devhyun88/ku-mistral-7b-PGO-v5/result_2023-12-04 06:12:17.json new file mode 100644 index 0000000000000000000000000000000000000000..660512d3c86f16d0f5b0b3cd6046be7e248c32db --- /dev/null +++ b/devhyun88/ku-mistral-7b-PGO-v5/result_2023-12-04 06:12:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3003412969283277, + "acc_stderr": 0.013395909309956999, + "acc_norm": 0.35238907849829354, + "acc_norm_stderr": 0.013960142600598675 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35879306910973907, + "acc_stderr": 0.004786660691181924, + "acc_norm": 0.44284007169886475, + "acc_norm_stderr": 0.004957068377516515 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691583, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.03424042924691583 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36270753512132825, + "acc_stderr": 0.0171927086746023, + "acc_norm": 0.36270753512132825, + "acc_norm_stderr": 0.0171927086746023 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.036471685236832266, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.036471685236832266 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.026664410886937613, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.026664410886937613 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136088, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.021278393863586282, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.021278393863586282 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567104, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.026662010578567104 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5, + "acc_stderr": 0.03275608910402091, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03275608910402091 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.03496101481191179, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.03496101481191179 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02351729433596329, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02351729433596329 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.034370793441061344, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.034370793441061344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36127167630057805, + "acc_stderr": 0.025862201852277902, + "acc_norm": 0.36127167630057805, + "acc_norm_stderr": 0.025862201852277902 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.026571483480719967, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.026571483480719967 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3339449541284404, + "acc_stderr": 0.020220554196736407, + "acc_norm": 0.3339449541284404, + "acc_norm_stderr": 0.020220554196736407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.02685729466328141, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.02685729466328141 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.018492596536396955, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.018492596536396955 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.02804594694204241, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.02804594694204241 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.0302252261600124, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.0302252261600124 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.25027932960893856, + "acc_stderr": 0.014487500852850412, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850412 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329387, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329387 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960238, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960238 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4008438818565401, + "acc_stderr": 0.03190080389473236, + "acc_norm": 0.4008438818565401, + "acc_norm_stderr": 0.03190080389473236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2646675358539765, + "acc_stderr": 0.011267332992845542, + "acc_norm": 0.2646675358539765, + "acc_norm_stderr": 0.011267332992845542 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.03198001660115071, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.03198001660115071 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.01489627744104187, + "mc2": 0.4111411666560298, + "mc2_stderr": 0.015517895415930608 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31286894923258557, + "acc_stderr": 0.01594101011830266, + "acc_norm": 0.42502951593860683, + "acc_norm_stderr": 0.016996016308362883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/ku-mistral-7b-PGO-v5", + "model_sha": "155245510057950127db75bedc014d1a144add5a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/kullama2-7b-ko-PGO/result_2023-11-02 02:24:24.json b/devhyun88/kullama2-7b-ko-PGO/result_2023-11-02 02:24:24.json new file mode 100644 index 0000000000000000000000000000000000000000..12d30ac2dba1ef4b2350fc0560f454becd6fb3ac --- /dev/null +++ b/devhyun88/kullama2-7b-ko-PGO/result_2023-11-02 02:24:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3319112627986348, + "acc_stderr": 0.013760988200880533, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892887 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40290778729336785, + "acc_stderr": 0.0048948011198986134, + "acc_norm": 0.5275841465843457, + "acc_norm_stderr": 0.00498218232392356 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.388250319284802, + "acc_stderr": 0.017427673295544326, + "acc_norm": 0.388250319284802, + "acc_norm_stderr": 0.017427673295544326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.36012861736334406, + "acc_stderr": 0.027264297599804015, + "acc_norm": 0.36012861736334406, + "acc_norm_stderr": 0.027264297599804015 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.32323232323232326, + "acc_stderr": 0.03332299921070645, + "acc_norm": 0.32323232323232326, + "acc_norm_stderr": 0.03332299921070645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3025210084033613, + "acc_stderr": 0.029837962388291926, + "acc_norm": 0.3025210084033613, + "acc_norm_stderr": 0.029837962388291926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.021992016662370554, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.021992016662370554 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297697, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297697 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.29354838709677417, + "acc_stderr": 0.025906087021319295, + "acc_norm": 0.29354838709677417, + "acc_norm_stderr": 0.025906087021319295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.02898545565233439, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.02898545565233439 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.32338308457711445, + "acc_stderr": 0.03307615947979033, + "acc_norm": 0.32338308457711445, + "acc_norm_stderr": 0.03307615947979033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.0220190800122179, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.0220190800122179 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566016, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.34971098265895956, + "acc_stderr": 0.025674281456531018, + "acc_norm": 0.34971098265895956, + "acc_norm_stderr": 0.025674281456531018 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.0360251131880677, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.0360251131880677 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.025842248700902175, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.025842248700902175 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.03161877917935411, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.03161877917935411 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.344954128440367, + "acc_stderr": 0.020380605405066952, + "acc_norm": 0.344954128440367, + "acc_norm_stderr": 0.020380605405066952 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242515, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242515 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.0275300784471103, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.0275300784471103 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.04537935177947879, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.04537935177947879 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.034260594244031654, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.034260594244031654 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.272875816993464, + "acc_stderr": 0.018020474148393577, + "acc_norm": 0.272875816993464, + "acc_norm_stderr": 0.018020474148393577 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 
0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467763, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467763 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.028765111718046955, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.028765111718046955 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144696, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144696 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3881856540084388, + "acc_stderr": 0.031722950043323296, + "acc_norm": 0.3881856540084388, + "acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2737940026075619, + "acc_stderr": 0.011388612167979392, + "acc_norm": 0.2737940026075619, + "acc_norm_stderr": 0.011388612167979392 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.031145570659486782, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.031145570659486782 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253597, + "mc2": 0.40682664044126005, + "mc2_stderr": 0.014892332644374185 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3293978748524203, + "acc_stderr": 0.016158746868147143, + "acc_norm": 0.44510035419126326, + "acc_norm_stderr": 0.01708641743100547 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/kullama2-7b-ko-PGO", + "model_sha": "63bbaf7382147cfaaee56a7c3126413288520e9c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/kullama2-7b-platypus-kogpt4/result_2023-10-30 07:47:57.json b/devhyun88/kullama2-7b-platypus-kogpt4/result_2023-10-30 07:47:57.json new file mode 100644 index 0000000000000000000000000000000000000000..a17b888ca841ba98d07794de038e0a87ee94ce50 --- /dev/null +++ b/devhyun88/kullama2-7b-platypus-kogpt4/result_2023-10-30 07:47:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3430034129692833, + "acc_stderr": 0.013872423223718173, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.01429122839353659 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4023102967536347, + "acc_stderr": 0.00489361701497531, + "acc_norm": 0.5319657438757219, + "acc_norm_stderr": 0.0049795737655758615 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37292464878671777, + "acc_stderr": 0.01729286826945392, + "acc_norm": 0.37292464878671777, + "acc_norm_stderr": 0.01729286826945392 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { 
+ "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071856, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071856 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3504823151125402, + "acc_stderr": 0.027098652621301747, + "acc_norm": 0.3504823151125402, + "acc_norm_stderr": 0.027098652621301747 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.028869778460267042, + "acc_norm": 0.20707070707070707, + "acc_norm_stderr": 0.028869778460267042 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.03695183311650232, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.03695183311650232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416545, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416545 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2689075630252101, + "acc_stderr": 0.02880139219363128, + "acc_norm": 0.2689075630252101, + "acc_norm_stderr": 0.02880139219363128 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.022139081103971527, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.022139081103971527 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255389, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255389 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4017094017094017, + "acc_stderr": 0.03211693751051622, + "acc_norm": 0.4017094017094017, + "acc_norm_stderr": 0.03211693751051622 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3283018867924528, + "acc_stderr": 0.02890159361241178, + "acc_norm": 0.3283018867924528, + "acc_norm_stderr": 0.02890159361241178 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + 
"acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507383, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507383 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.035118075718047245, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.035118075718047245 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3482587064676617, + "acc_stderr": 0.03368787466115459, + "acc_norm": 0.3482587064676617, + "acc_norm_stderr": 0.03368787466115459 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415415, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415415 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080343, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080343 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3265895953757225, + "acc_stderr": 0.025248264774242832, + "acc_norm": 0.3265895953757225, + "acc_norm_stderr": 0.025248264774242832 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32098765432098764, + "acc_stderr": 0.02597656601086274, + "acc_norm": 0.32098765432098764, + "acc_norm_stderr": 0.02597656601086274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28623853211009176, + "acc_stderr": 0.019379436628919975, + "acc_norm": 0.28623853211009176, + "acc_norm_stderr": 0.019379436628919975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.027184498909941616, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.027184498909941616 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.39669421487603307, + 
"acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.034260594244031654, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.034260594244031654 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.018249024411207664, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.018249024411207664 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340461004, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340461004 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626964, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.029886910547626964 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.025035845227711254, + "acc_norm": 0.21691176470588236, + "acc_norm_stderr": 0.025035845227711254 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0289205832206756, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0289205832206756 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3755274261603376, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.3755274261603376, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2816166883963494, + "acc_stderr": 0.011487783272786694, + "acc_norm": 0.2816166883963494, + "acc_norm_stderr": 0.011487783272786694 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268049, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268049 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476189, + "mc2": 0.4026846131079194, + "mc2_stderr": 0.014939937441482552 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31641086186540734, + "acc_stderr": 0.015989617951065477, + "acc_norm": 0.4639905548996458, + "acc_norm_stderr": 0.017145715365486654 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 
1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/kullama2-7b-platypus-kogpt4", + "model_sha": "033fb6e8db347530e49449d888d780b777e48715", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dltjdgh0928/lsh_finetune_v0.11/result_2023-11-01 01:32:31.json b/dltjdgh0928/lsh_finetune_v0.11/result_2023-11-01 01:32:31.json new file mode 100644 index 0000000000000000000000000000000000000000..2febda3da4f26acd564f4e72838e466c2df17dd4 --- /dev/null +++ b/dltjdgh0928/lsh_finetune_v0.11/result_2023-11-01 01:32:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.01365998089427737, + "acc_norm": 0.3703071672354949, + "acc_norm_stderr": 0.01411129875167495 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36058554072893845, + "acc_stderr": 0.004791890625834196, + "acc_norm": 0.4471220872336188, + "acc_norm_stderr": 0.004961799358836431 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 
0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.017612204084663775, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.017612204084663775 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357766, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357766 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.39228295819935693, + "acc_stderr": 0.027731258647011994, + "acc_norm": 0.39228295819935693, + "acc_norm_stderr": 0.027731258647011994 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110656, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110656 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.027709359675032495, + "acc_norm": 0.3870967741935484, + 
"acc_norm_stderr": 0.027709359675032495 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.03056159042673183, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670238, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670238 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137288, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137288 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.02686462436675664, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.02686462436675664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.027163686038271233, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.027163686038271233 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3963302752293578, + "acc_stderr": 0.020971469947900525, + "acc_norm": 0.3963302752293578, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557836, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 
0.04216370213557836 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283683, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283683 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.01911721391149516, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.01911721391149516 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169927, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169927 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.027971541370170598, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.027971541370170598 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.031067211262872492, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.031067211262872492 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.288135593220339, + "acc_stderr": 0.011567140661324563, + "acc_norm": 0.288135593220339, + "acc_norm_stderr": 0.011567140661324563 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247272, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247272 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559696, + "mc2": 0.5058382452993124, + "mc2_stderr": 0.015661402852943502 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4002361275088548, + "acc_stderr": 0.016844693510505056, + "acc_norm": 0.4911452184179457, + "acc_norm_stderr": 
0.01718765819933673 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dltjdgh0928/lsh_finetune_v0.11", + "model_sha": "37760736eef6004ed416dd27ffaaad7cfe5da106", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dltjdgh0928/mistral_open_orca_ko/result_2023-10-30 07:56:53.json b/dltjdgh0928/mistral_open_orca_ko/result_2023-10-30 07:56:53.json new file mode 100644 index 0000000000000000000000000000000000000000..ca36ec701cb5872befbbd7cccda9ef9488aef7d5 --- /dev/null +++ b/dltjdgh0928/mistral_open_orca_ko/result_2023-10-30 07:56:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2832764505119454, + "acc_stderr": 0.013167478735134575, + "acc_norm": 
0.33447098976109213, + "acc_norm_stderr": 0.013787460322441372 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33260306711810395, + "acc_stderr": 0.004701828071992634, + "acc_norm": 0.4108743278231428, + "acc_norm_stderr": 0.00490987000638884 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.03786720706234214, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37420178799489145, + "acc_stderr": 0.017304805072252044, + "acc_norm": 0.37420178799489145, + "acc_norm_stderr": 0.017304805072252044 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785137, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785137 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824664, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824664 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4115755627009646, + "acc_stderr": 0.027950481494401273, + "acc_norm": 0.4115755627009646, + "acc_norm_stderr": 0.027950481494401273 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008732, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008732 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808779, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808779 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236153, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236153 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3435897435897436, + "acc_stderr": 0.024078696580635463, + "acc_norm": 0.3435897435897436, + "acc_norm_stderr": 0.024078696580635463 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.02786932057166464, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02786932057166464 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467506, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467506 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.02422996529842508, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.02422996529842508 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.03874102859818083, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.03874102859818083 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.026571483480719964, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.026571483480719964 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4036697247706422, + "acc_stderr": 0.02103570485657497, + "acc_norm": 0.4036697247706422, + "acc_norm_stderr": 0.02103570485657497 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.018492596536396955, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.018492596536396955 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460997, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460997 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010085, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010085 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48523206751054854, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.48523206751054854, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29139504563233376, + "acc_stderr": 0.011605720214257612, + "acc_norm": 0.29139504563233376, + "acc_norm_stderr": 0.011605720214257612 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.034107853389047184, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.034107853389047184 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3329253365973072, + "mc1_stderr": 0.016497402382012052, + "mc2": 0.5083138267031554, + "mc2_stderr": 0.015718960507609445 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32113341204250295, + "acc_stderr": 0.016052762579111573, + "acc_norm": 0.4085005903187721, + "acc_norm_stderr": 0.016900062879427115 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dltjdgh0928/mistral_open_orca_ko", + "model_sha": "d8765c261f6eb7b3746e12b7d0c5cba2d0901653", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No 
newline at end of file diff --git a/dltjdgh0928/test_instruction/result_2023-11-01 23:54:10.json b/dltjdgh0928/test_instruction/result_2023-11-01 23:54:10.json new file mode 100644 index 0000000000000000000000000000000000000000..82ebf86c70612d5db6148640f24cb743c2c84093 --- /dev/null +++ b/dltjdgh0928/test_instruction/result_2023-11-01 23:54:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038075, + "acc_norm": 0.4121160409556314, + "acc_norm_stderr": 0.014383915302225402 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3858793069109739, + "acc_stderr": 0.004858074013443988, + "acc_norm": 0.4956184027086238, + "acc_norm_stderr": 0.004989589816180235 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041982, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.035094383488796295, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.035094383488796295 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.025230381238934833, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.025230381238934833 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502737, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.03047144586718323, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.03047144586718323 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562413, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562413 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 
0.027628737155668773, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668773 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.02143555482001308, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.02143555482001308 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.01965992249362334, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.01965992249362334 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639893, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639893 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23016759776536314, + "acc_stderr": 0.01407833925342581, + "acc_norm": 0.23016759776536314, + "acc_norm_stderr": 0.01407833925342581 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898445, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.03190080389473236, + "acc_norm": 
0.5991561181434599, + "acc_norm_stderr": 0.03190080389473236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31681877444589307, + "acc_stderr": 0.011882349954723016, + "acc_norm": 0.31681877444589307, + "acc_norm_stderr": 0.011882349954723016 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768542, + "mc2": 0.4796330162483247, + "mc2_stderr": 0.015594823470032292 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5230224321133412, + "acc_norm_stderr": 0.017172121546727627 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dltjdgh0928/test_instruction", + "model_sha": "7850d81409e5abbe9170009f0b463eb25042313b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v0.1/result_2023-10-31 10:42:08.json b/eclipsemint/kollama2-7b-v0.1/result_2023-10-31 10:42:08.json new file mode 100644 index 0000000000000000000000000000000000000000..58e5dc8fde8ff97b9873cd27815944a089ebf14f --- /dev/null +++ b/eclipsemint/kollama2-7b-v0.1/result_2023-10-31 10:42:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26791808873720135, + "acc_stderr": 0.012942030195136428, + "acc_norm": 0.31313993174061433, + "acc_norm_stderr": 0.013552671543623503 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3329018123879705, + "acc_stderr": 0.004702886273189405, + "acc_norm": 0.4117705636327425, + "acc_norm_stderr": 0.004911481830909236 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3090676883780332, + "acc_stderr": 0.01652498891970219, + "acc_norm": 0.3090676883780332, + "acc_norm_stderr": 0.01652498891970219 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818784, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818784 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677698, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677698 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23737373737373738, + "acc_stderr": 0.0303137105381989, + "acc_norm": 0.23737373737373738, + "acc_norm_stderr": 0.0303137105381989 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, 
+ "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.029344572500634363, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.029344572500634363 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.02199201666237056, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.02199201666237056 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.047128212574267705, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.047128212574267705 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534327, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534327 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.032224140452411065, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.032224140452411065 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108614, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108614 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02534809746809783, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02534809746809783 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.34328358208955223, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.34328358208955223, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.03156809362703174, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.03156809362703174 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + 
}, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3265895953757225, + "acc_stderr": 0.025248264774242826, + "acc_norm": 0.3265895953757225, + "acc_norm_stderr": 0.025248264774242826 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.0360251131880677, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.0360251131880677 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.026041766202717167, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.026041766202717167 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518752, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518752 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22935779816513763, + "acc_stderr": 0.018025349724618684, + "acc_norm": 0.22935779816513763, + "acc_norm_stderr": 0.018025349724618684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.02591780611714716, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.02591780611714716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.018635594034423976, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.018635594034423976 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.024562204314142317, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.024562204314142317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789834, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789834 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3206751054852321, + "acc_stderr": 0.030381931949990414, + "acc_norm": 0.3206751054852321, + "acc_norm_stderr": 0.030381931949990414 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26727509778357234, + "acc_stderr": 0.011302607515637528, + "acc_norm": 0.26727509778357234, + "acc_norm_stderr": 0.011302607515637528 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083291, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083291 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03588624800091707, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091707 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.01605899902610062, + "mc2": 0.48594348947345256, + "mc2_stderr": 0.015487528453498189 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2680047225501771, + "acc_stderr": 0.015227905796335145, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.016366945603281273 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v0.1", + "model_sha": "875311380804f4022f56d6c45d2bdcee2a899f43", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v0.3/result_2023-11-07 00:36:29.json b/eclipsemint/kollama2-7b-v0.3/result_2023-11-07 00:36:29.json new file mode 100644 index 0000000000000000000000000000000000000000..2cb0628d26030062ec7433749ea4160cd9b6adb0 --- /dev/null +++ b/eclipsemint/kollama2-7b-v0.3/result_2023-11-07 00:36:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2721843003412969, + "acc_stderr": 0.01300660040642371, + "acc_norm": 0.31143344709897613, + "acc_norm_stderr": 0.013532472099850944 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3320055765783708, + "acc_stderr": 0.00469970528097657, + "acc_norm": 0.4071898028281219, + "acc_norm_stderr": 0.004903066639761954 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.32950191570881227, + "acc_stderr": 0.01680832226174045, + "acc_norm": 0.32950191570881227, + "acc_norm_stderr": 0.01680832226174045 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.039446241625011175, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.039446241625011175 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.034605799075530255, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.034605799075530255 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.026311858071854155, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.026311858071854155 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 
0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863776, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132368, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132368 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733555, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733555 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.02614868593067175, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.02614868593067175 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.02590789712240817, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.02590789712240817 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804723, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804723 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3582089552238806, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.3582089552238806, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.03156809362703174, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.03156809362703174 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184756, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184756 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.024476994076247333, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.024476994076247333 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868066, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868066 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25688073394495414, + "acc_stderr": 0.01873249292834245, + "acc_norm": 0.25688073394495414, + "acc_norm_stderr": 0.01873249292834245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.026716118380156837, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.026716118380156837 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.036117805602848975, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.036117805602848975 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.018249024411207664, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.018249024411207664 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { 
+ "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21323529411764705, + "acc_stderr": 0.02488097151229427, + "acc_norm": 0.21323529411764705, + "acc_norm_stderr": 0.02488097151229427 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.031067211262872475, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.031067211262872475 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3080168776371308, + "acc_stderr": 0.0300523893356057, + "acc_norm": 0.3080168776371308, + "acc_norm_stderr": 0.0300523893356057 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26010430247718386, + "acc_stderr": 0.011204382887823829, + "acc_norm": 0.26010430247718386, + "acc_norm_stderr": 0.011204382887823829 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015008, + "mc2": 0.4265558352089997, + "mc2_stderr": 0.01527382517262586 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28689492325855964, + "acc_stderr": 0.015550809966781778, + "acc_norm": 0.38961038961038963, + "acc_norm_stderr": 0.0167661616718935 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v0.3", + "model_sha": "1e45ebdd7fe58fa6c62eca0502aef2cf4383336c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v0.4/result_2023-11-16 07:23:45.json b/eclipsemint/kollama2-7b-v0.4/result_2023-11-16 07:23:45.json new file mode 100644 index 0000000000000000000000000000000000000000..425d23fc2f062bf5ab86e0ecbae279197a7c7b05 --- /dev/null +++ b/eclipsemint/kollama2-7b-v0.4/result_2023-11-16 07:23:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2696245733788396, + "acc_stderr": 0.01296804068686916, + "acc_norm": 0.30631399317406144, + "acc_norm_stderr": 0.01347058441727651 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3350926110336586, + "acc_stderr": 0.004710581496639349, + "acc_norm": 0.410973909579765, + "acc_norm_stderr": 0.004910049928688086 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260594, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260594 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3524904214559387, + "acc_stderr": 0.01708415024408138, + "acc_norm": 0.3524904214559387, + "acc_norm_stderr": 0.01708415024408138 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357787, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357787 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.035294868015111135, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.035294868015111135 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.3311897106109325, + "acc_stderr": 0.026730620728004903, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004903 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.03996629574876718, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.03996629574876718 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.029597329730978107, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.029597329730978107 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204423, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204423 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4188034188034188, + "acc_stderr": 0.03232128912157792, + "acc_norm": 0.4188034188034188, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.02674989977124123, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.02674989977124123 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008937, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008937 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4079601990049751, + "acc_stderr": 0.034751163651940926, 
+ "acc_norm": 0.4079601990049751, + "acc_norm_stderr": 0.034751163651940926 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.03368762932259431, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.03368762932259431 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525214, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525214 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869355, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869355 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33236994219653176, + "acc_stderr": 0.025361168749688218, + "acc_norm": 0.33236994219653176, + "acc_norm_stderr": 0.025361168749688218 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.026725868809100786, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.026725868809100786 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29015544041450775, + "acc_stderr": 0.032752644677915166, + "acc_norm": 0.29015544041450775, + "acc_norm_stderr": 0.032752644677915166 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.037752050135836386, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.037752050135836386 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25871559633027524, + "acc_stderr": 0.01877605231961962, + "acc_norm": 0.25871559633027524, + "acc_norm_stderr": 0.01877605231961962 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.027057974624494382, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.027057974624494382 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.0180540274588152, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.0180540274588152 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114023, + "acc_norm": 0.2801418439716312, + 
"acc_norm_stderr": 0.02678917235114023 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625176, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625176 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935893, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935893 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.030781549102026212, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.030781549102026212 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539258, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539258 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.03182231867647554, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.03182231867647554 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219371, + "mc2": 0.4534153509461654, + "mc2_stderr": 0.015441392201137738 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.25737898465171194, + "acc_stderr": 0.015030899730346752, + "acc_norm": 0.34238488783943327, + "acc_norm_stderr": 0.016313907844146373 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v0.4", + "model_sha": "cc48fd4ee8e59e4d067682819681358e4c265446", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v0/result_2023-10-29 09:14:47.json b/eclipsemint/kollama2-7b-v0/result_2023-10-29 09:14:47.json new file mode 100644 index 0000000000000000000000000000000000000000..5cb01bf42127daf5210b10d29bd553a0b73c8e12 --- /dev/null +++ b/eclipsemint/kollama2-7b-v0/result_2023-10-29 09:14:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26791808873720135, + "acc_stderr": 0.012942030195136421, + "acc_norm": 0.310580204778157, + "acc_norm_stderr": 0.013522292098053057 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33389762995419237, + "acc_stderr": 0.004706398252382464, + "acc_norm": 0.4122684724158534, + "acc_norm_stderr": 0.004912370023913011 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572922, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572922 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34738186462324394, + "acc_stderr": 0.01702667174865573, + "acc_norm": 0.34738186462324394, + "acc_norm_stderr": 0.01702667174865573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + 
}, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761923, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761923 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648022, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648022 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.032443052830087304 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847837, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847837 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.037800192304380135, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.037800192304380135 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.029079374539480007, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.029079374539480007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671746, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671746 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.03222414045241107, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.03222414045241107 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724057, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724057 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.04653429807913508, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.04653429807913508 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.025644108639267624, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.025644108639267624 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.03336767086567977, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.03336767086567977 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21693121693121692, + "acc_stderr": 0.021227082449445045, + "acc_norm": 0.21693121693121692, + "acc_norm_stderr": 0.021227082449445045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526502, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.02402774515526502 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.03487825168497892, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.03487825168497892 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.02577311116963045, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.02577311116963045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24403669724770644, + "acc_stderr": 0.018415286351416416, + "acc_norm": 0.24403669724770644, + "acc_norm_stderr": 0.018415286351416416 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.025829163272757475, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.025829163272757475 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.04537935177947879, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.04537935177947879 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.017952449196987862, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.017952449196987862 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.025892151156709405, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.025892151156709405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372937, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.025336848563332372, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.025336848563332372 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.28270042194092826, + "acc_stderr": 0.029312814153955924, + "acc_norm": 0.28270042194092826, + "acc_norm_stderr": 0.029312814153955924 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045517, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045517 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693254, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.0340150671524904, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.0340150671524904 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326905, + "mc2": 0.4649376014172755, + "mc2_stderr": 0.015443831068166118 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791248, + "acc_norm": 0.35182998819362454, + "acc_norm_stderr": 0.016418206451218054 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v0", + "model_sha": "e2a3ee343f997cca7ad3e25b5d970376d79c5b4e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v1.1/result_2023-11-02 21:53:46.json b/eclipsemint/kollama2-7b-v1.1/result_2023-11-02 21:53:46.json new file mode 100644 index 0000000000000000000000000000000000000000..33b0d70cf77fd556e657ee3c891f1f15b01f5476 --- /dev/null +++ b/eclipsemint/kollama2-7b-v1.1/result_2023-11-02 21:53:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20051194539249148, + "acc_stderr": 0.011700318050499354, + "acc_norm": 0.25341296928327645, + "acc_norm_stderr": 0.012710896778378606 + }, + "harness|ko_hellaswag|10": { + "acc": 0.27614021111332404, + "acc_stderr": 0.004461732908157659, + "acc_norm": 0.29904401513642703, + "acc_norm_stderr": 0.004569034613332603 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + 
"acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21711366538952745, + "acc_stderr": 0.014743125394823297, + "acc_norm": 0.21711366538952745, + "acc_norm_stderr": 0.014743125394823297 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.03550920185689631, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.03550920185689631 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24437299035369775, + "acc_stderr": 0.024406162094668886, + "acc_norm": 0.24437299035369775, + "acc_norm_stderr": 0.024406162094668886 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.18834080717488788, + "acc_stderr": 0.026241132996407252, + "acc_norm": 0.18834080717488788, + "acc_norm_stderr": 0.026241132996407252 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596918, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596918 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.032586303838365555, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.032586303838365555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.03006676158297794, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.03006676158297794 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.021606294494647727, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.021606294494647727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21182266009852216, + "acc_stderr": 0.02874898368994106, + "acc_norm": 0.21182266009852216, + "acc_norm_stderr": 0.02874898368994106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33225806451612905, + "acc_stderr": 0.02679556084812279, + "acc_norm": 0.33225806451612905, + "acc_norm_stderr": 0.02679556084812279 + }, 
+ "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.29056603773584905, + "acc_stderr": 0.02794321998933714, + "acc_norm": 0.29056603773584905, + "acc_norm_stderr": 0.02794321998933714 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624555, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624555 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.1994219653179191, + "acc_stderr": 0.021511900654252552, + "acc_norm": 0.1994219653179191, + "acc_norm_stderr": 0.021511900654252552 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24382716049382716, + "acc_stderr": 0.023891879541959614, + "acc_norm": 0.24382716049382716, + "acc_norm_stderr": 0.023891879541959614 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30091743119266057, + "acc_stderr": 0.01966475136680211, + "acc_norm": 0.30091743119266057, + "acc_norm_stderr": 0.01966475136680211 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.02367908986180772, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.02367908986180772 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.043207678075366684, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.043207678075366684 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351585, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.016774672365468514, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.016774672365468514 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.024847921358063962, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.024847921358063962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347018, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347018 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.031001209039894836, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.031001209039894836 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.02944377302259469, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.02944377302259469 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2522816166883963, + "acc_stderr": 0.011092789056875246, + "acc_norm": 0.2522816166883963, + "acc_norm_stderr": 0.011092789056875246 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693268, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693268 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511782, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511782 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2252141982864137, + "mc1_stderr": 0.014623240768023503, + "mc2": 0.45823651606631305, + "mc2_stderr": 0.01710273017399995 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22904368358913813, + "acc_stderr": 0.01444737227725382, + "acc_norm": 0.282172373081464, + "acc_norm_stderr": 0.015473271583988433 + } + }, 
+ "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v1.1", + "model_sha": "02268afbed60e68ba0142404ddd5a2c0031a3420", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v1.2/result_2023-11-04 06:39:14.json b/eclipsemint/kollama2-7b-v1.2/result_2023-11-04 06:39:14.json new file mode 100644 index 0000000000000000000000000000000000000000..2bb458a742a79739cc2e864df96be8b550a0910f --- /dev/null +++ b/eclipsemint/kollama2-7b-v1.2/result_2023-11-04 06:39:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20051194539249148, + "acc_stderr": 0.011700318050499361, + "acc_norm": 0.23976109215017063, + "acc_norm_stderr": 
0.012476304127453956 + }, + "harness|ko_hellaswag|10": { + "acc": 0.280920135431189, + "acc_stderr": 0.004485300194072271, + "acc_norm": 0.3069109739095798, + "acc_norm_stderr": 0.00460269541675698 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038245, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.034678266857038245 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.03760178006026621, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.03760178006026621 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.015745497169049057, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.015745497169049057 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19574468085106383, + "acc_stderr": 0.025937853139977148, + "acc_norm": 0.19574468085106383, + "acc_norm_stderr": 0.025937853139977148 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21084337349397592, + "acc_stderr": 0.031755547866299194, + "acc_norm": 0.21084337349397592, + "acc_norm_stderr": 0.031755547866299194 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818777, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818777 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533084, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533084 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.036186648199362466, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.036186648199362466 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277726, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277726 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204416, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204416 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.19444444444444445, + "acc_stderr": 0.038260763248848646, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.038260763248848646 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.27350427350427353, + "acc_stderr": 0.029202540153431166, + "acc_norm": 0.27350427350427353, + "acc_norm_stderr": 0.029202540153431166 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.02560423347089911, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.02560423347089911 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014652, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014652 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047875, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047875 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.02271746789770861, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.02271746789770861 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.023703099525258172, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.023703099525258172 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.03559039531617342, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.03559039531617342 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02540719779889017, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02540719779889017 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.01822407811729907, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.01822407811729907 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.024739981355113592, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.024739981355113592 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119667, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119667 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.017740899509177788, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.017740899509177788 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.02635806569888059, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.02635806569888059 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.0312803908432988, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.0312803908432988 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1948529411764706, + "acc_stderr": 0.024060599423487424, + "acc_norm": 0.1948529411764706, + "acc_norm_stderr": 0.024060599423487424 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.02783302387139968, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.02783302387139968 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24472573839662448, + "acc_stderr": 0.027985699387036413, + "acc_norm": 0.24472573839662448, + "acc_norm_stderr": 0.027985699387036413 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2646675358539765, + "acc_stderr": 0.011267332992845528, + "acc_norm": 0.2646675358539765, + "acc_norm_stderr": 0.011267332992845528 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23011015911872704, + "mc1_stderr": 0.014734557959807763, + "mc2": 0.4651387560988257, + "mc2_stderr": 0.0170466746202686 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.23730814639905548, + "acc_stderr": 0.01462667783718623, + "acc_norm": 0.3246753246753247, + "acc_norm_stderr": 0.016098883939346467 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v1.2", + "model_sha": "c69cbcd522d1a49ae1576342c027ceadc57de738", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline 
at end of file diff --git a/eclipsemint/kollama2-7b-v1.3/result_2023-11-05 03:47:02.json b/eclipsemint/kollama2-7b-v1.3/result_2023-11-05 03:47:02.json new file mode 100644 index 0000000000000000000000000000000000000000..51acf406eb716b398cfe676b5175ce4cbbf5ab83 --- /dev/null +++ b/eclipsemint/kollama2-7b-v1.3/result_2023-11-05 03:47:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19539249146757678, + "acc_stderr": 0.01158690718995291, + "acc_norm": 0.24829351535836178, + "acc_norm_stderr": 0.012624912868089755 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2832105158334993, + "acc_stderr": 0.0044963697421321076, + "acc_norm": 0.3134833698466441, + "acc_norm_stderr": 0.0046296088632722925 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.03675668832233188, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.03675668832233188 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2681992337164751, + "acc_stderr": 0.015842430835269438, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.015842430835269438 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066655, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066655 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2170212765957447, + "acc_stderr": 0.026947483121496252, + "acc_norm": 0.2170212765957447, + "acc_norm_stderr": 0.026947483121496252 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2572347266881029, + "acc_stderr": 0.024826171289250888, + "acc_norm": 0.2572347266881029, + "acc_norm_stderr": 0.024826171289250888 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596917, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596917 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.031353050095330834, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.031353050095330834 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.03921545312467122, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.03921545312467122 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2815126050420168, + "acc_stderr": 0.029213549414372174, + "acc_norm": 0.2815126050420168, + "acc_norm_stderr": 0.029213549414372174 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.02306043838085774, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.02306043838085774 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.037552658650371835, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.037552658650371835 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.031089826002937523, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.031089826002937523 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.0255606047210229, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.0255606047210229 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.25213675213675213, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.25213675213675213, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.038313051408846006, + "acc_norm": 0.2, + "acc_norm_stderr": 0.038313051408846006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.031524391865554, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.031524391865554 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.02264421261552521, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.02264421261552521 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080342, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080342 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388677006, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388677006 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 
0.025557653981868034, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868034 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089116, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089116 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.018125669180861507, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.018125669180861507 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952924, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.017986615304030305, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.017986615304030305 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.026799562024887653, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.026799562024887653 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25316455696202533, + "acc_stderr": 0.028304657943035303, + 
"acc_norm": 0.25316455696202533, + "acc_norm_stderr": 0.028304657943035303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2588005215123859, + "acc_stderr": 0.01118610904656461, + "acc_norm": 0.2588005215123859, + "acc_norm_stderr": 0.01118610904656461 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23378212974296206, + "mc1_stderr": 0.01481619599193158, + "mc2": 0.46005718929477757, + "mc2_stderr": 0.016990439061351184 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.01484604496825225, + "acc_norm": 0.29161747343565525, + "acc_norm_stderr": 0.015626276690070242 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, 
+ "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v1.3", + "model_sha": "ba1caccde94a38f8e099177229e71b93a9aac534", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v1/result_2023-11-01 09:12:02.json b/eclipsemint/kollama2-7b-v1/result_2023-11-01 09:12:02.json new file mode 100644 index 0000000000000000000000000000000000000000..42867a3f2c8e05e16e25dab50af7ad87a5d75b32 --- /dev/null +++ b/eclipsemint/kollama2-7b-v1/result_2023-11-01 09:12:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1962457337883959, + "acc_stderr": 0.011606019881416282, + "acc_norm": 0.22781569965870307, + "acc_norm_stderr": 0.012256708602326902 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28032264489145586, + "acc_stderr": 0.004482388821388948, + "acc_norm": 0.31009759012148974, + "acc_norm_stderr": 0.0046158803527997444 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260595, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260595 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683228, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683228 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2572347266881029, + "acc_stderr": 0.024826171289250888, + "acc_norm": 0.2572347266881029, + "acc_norm_stderr": 0.024826171289250888 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, 
+ "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.031265112061730424, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.031265112061730424 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587403, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735703, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735703 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124252, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124252 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032499, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032499 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714857, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714857 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.02866685779027465, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.02866685779027465 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23989569752281617, + "acc_stderr": 0.010906282617981652, + "acc_norm": 0.23989569752281617, + "acc_norm_stderr": 0.010906282617981652 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": 0.44801118006268165, + "mc2_stderr": 0.016262139478608006 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.20188902007083825, + "acc_stderr": 0.013800753895777422, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.015311853110300352 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v1", + "model_sha": "d3271305724d054f37807dae60c6c875d0092362", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ehartford/dolphin-2.2.1-mistral-7b/result_2023-12-18 04:48:34.json b/ehartford/dolphin-2.2.1-mistral-7b/result_2023-12-18 04:48:34.json new file mode 100644 index 0000000000000000000000000000000000000000..221b2d5445caecd9224f0acce2e50720b7536547 --- /dev/null +++ b/ehartford/dolphin-2.2.1-mistral-7b/result_2023-12-18 04:48:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3430034129692833, + "acc_stderr": 0.013872423223718166, + "acc_norm": 0.39761092150170646, + "acc_norm_stderr": 0.014301752223279535 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3685520812587134, + "acc_stderr": 0.004814261966376846, + "acc_norm": 0.46484763991236805, + "acc_norm_stderr": 0.004977434505403359 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.01783252407959326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236784, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236784 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + 
"acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.035315058793591834, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.035315058793591834 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.028447965476231022, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.028447965476231022 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.03461199429040014, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.03461199429040014 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149145, + "acc_norm": 0.335978835978836, + 
"acc_norm_stderr": 0.024326310529149145 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225882, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225882 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529658, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529658 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.02878222756134725, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.02878222756134725 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.045723723587374296, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.045723723587374296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3016759776536313, + "acc_stderr": 0.015350767572220285, + "acc_norm": 0.3016759776536313, + "acc_norm_stderr": 0.015350767572220285 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002575, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002575 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32313341493268055, + "mc1_stderr": 0.016371836286454607, + "mc2": 0.5137325149564673, + "mc2_stderr": 0.015714111156826572 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.01690006287942712, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.017090852631668332 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ehartford/dolphin-2.2.1-mistral-7b", + "model_sha": "001b48e9aebffb395c698af47b6b48364cc3cbe8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/llama2-13b-dpo-test/result_2023-12-18 08:27:20.json b/etri-xainlp/llama2-13b-dpo-test/result_2023-12-18 08:27:20.json new file mode 100644 index 0000000000000000000000000000000000000000..7588deb475eca5005260012ee4ac068e1eb9f428 --- /dev/null +++ b/etri-xainlp/llama2-13b-dpo-test/result_2023-12-18 08:27:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3395904436860068, + "acc_stderr": 0.01383903976282016, + "acc_norm": 0.39505119453924914, + "acc_norm_stderr": 0.014285898292938177 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3646683927504481, + "acc_stderr": 0.004803533333364228, + "acc_norm": 0.46932881896036643, + "acc_norm_stderr": 0.004980384575535392 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.017867695938429778, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.017867695938429778 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288087, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288087 + 
}, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.03561625488673745, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.03561625488673745 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830524, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830524 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.03222414045241107, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.03222414045241107 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389174, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389174 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267439, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267439 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.02345603738398203, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.02345603738398203 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.026882643434022885, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.026882643434022885 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124764, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124764 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.036072280610477486, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.036072280610477486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43119266055045874, + "acc_stderr": 0.021233365030319563, + "acc_norm": 0.43119266055045874, + "acc_norm_stderr": 0.021233365030319563 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.315359477124183, + "acc_stderr": 0.018798086284886887, + "acc_norm": 0.315359477124183, + "acc_norm_stderr": 0.018798086284886887 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.2730496453900709, + "acc_stderr": 0.02657786094330786, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.02657786094330786 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.02806499816704009, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.02806499816704009 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4050632911392405, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.4050632911392405, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.01175993961808546, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.01175993961808546 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.034107853389047184, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.034107853389047184 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.3995235998430406, + "mc2_stderr": 0.01495528755082175 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.017177301992342558, + "acc_norm": 0.5407319952774499, + "acc_norm_stderr": 0.017133218276537673 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama2-13b-dpo-test", + "model_sha": "de694283acd14414b7309a9874a3d033926b22a5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/llama2-ko-13b-instruct-v1.1/result_2023-11-26 05:24:47.json b/etri-xainlp/llama2-ko-13b-instruct-v1.1/result_2023-11-26 05:24:47.json new file mode 100644 index 0000000000000000000000000000000000000000..87760816a80b067460f9c60aff07df6584b2d2df --- /dev/null +++ b/etri-xainlp/llama2-ko-13b-instruct-v1.1/result_2023-11-26 05:24:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.013888816286782112, + "acc_norm": 0.39761092150170646, + "acc_norm_stderr": 0.014301752223279531 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3676558454491137, + "acc_stderr": 0.004811815959388828, + "acc_norm": 0.46574387572196774, + "acc_norm_stderr": 0.004978056798794866 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.039446241625011175, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.039446241625011175 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + 
"acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745647, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745647 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824665, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824665 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.027917050748484627, + "acc_norm": 0.40836012861736337, + "acc_norm_stderr": 0.027917050748484627 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459157, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.43434343434343436, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.43434343434343436, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.02737987122994324, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.02737987122994324 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641087, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641087 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33584905660377357, + "acc_stderr": 0.029067220146644823, + "acc_norm": 0.33584905660377357, + "acc_norm_stderr": 0.029067220146644823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, 
+ "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165581, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165581 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.02357760479165581, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.02357760479165581 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.38439306358381503, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.38439306358381503, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.02733954664066273, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.02733954664066273 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159596, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.027582811415159596 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.01866335967146367, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.01866335967146367 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.0316746870682898, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.0316746870682898 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681407, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681407 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3673469387755102, + "acc_stderr": 0.030862144921087565, + "acc_norm": 0.3673469387755102, + "acc_norm_stderr": 0.030862144921087565 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.0325446201076786, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.0325446201076786 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741523, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741523 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557956, + "mc2": 0.4195624279144106, + "mc2_stderr": 0.015536654449711767 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3541912632821724, + "acc_stderr": 0.016443175749214757, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.016819438642971404 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama2-ko-13b-instruct-v1.1", + "model_sha": "159f0e387cfaff0e87278a95af4803ae9dd8b718", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/llama2-ko-13b-instruct-v1.2/result_2023-11-28 02:12:30.json b/etri-xainlp/llama2-ko-13b-instruct-v1.2/result_2023-11-28 02:12:30.json new file mode 100644 index 0000000000000000000000000000000000000000..8ea95a96c8ed3c9bbd1da10d68a82f3ea7c2c8f2 --- /dev/null +++ b/etri-xainlp/llama2-ko-13b-instruct-v1.2/result_2023-11-28 02:12:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3378839590443686, + "acc_stderr": 0.013822047922283509, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 0.01415063143511173 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36317466640111534, + "acc_stderr": 0.004799317209902019, + "acc_norm": 0.45867357100179246, + "acc_norm_stderr": 0.004972708369656541 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.03645981377388806, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.03645981377388806 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + 
"acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47126436781609193, + "acc_stderr": 0.017850410794380166, + "acc_norm": 0.47126436781609193, + "acc_norm_stderr": 0.017850410794380166 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.0350729543137052, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.0350729543137052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485376, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467766, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467766 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41414141414141414, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.41414141414141414, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.0394170763206489, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.0394170763206489 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.031918633744784645, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.031918633744784645 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.024756000382130945, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.024756000382130945 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.027379871229943238, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 
0.027379871229943238 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5, + "acc_stderr": 0.03275608910402091, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03275608910402091 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199586, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199586 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073824, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073824 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3439306358381503, + "acc_stderr": 0.025574123786546672, + "acc_norm": 0.3439306358381503, + "acc_norm_stderr": 0.025574123786546672 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.038020681028996146, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.038020681028996146 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.02723741509459247, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.02723741509459247 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43853211009174314, + "acc_stderr": 0.02127471307395458, + "acc_norm": 0.43853211009174314, + "acc_norm_stderr": 0.02127471307395458 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824096, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824096 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4793388429752066, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.01897542792050721, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.01897542792050721 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.032259413526312945, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.032259413526312945 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966339, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966339 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.02725720260611495, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.02725720260611495 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.031512360446742806, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.031512360446742806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48523206751054854, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.48523206751054854, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 0.011808598262503316, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.39420490667593977, + "mc2_stderr": 0.015249702539058304 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.35537190082644626, + "acc_stderr": 0.01645549600031452, + "acc_norm": 0.3789846517119244, + "acc_norm_stderr": 0.01667926068422929 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama2-ko-13b-instruct-v1.2", + "model_sha": "3f79d4ea5fd24ad29521814ce0f8462a9f6828dd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/llama2-ko-13b-instruct-v1/result_2023-10-30 03:35:54.json b/etri-xainlp/llama2-ko-13b-instruct-v1/result_2023-10-30 03:35:54.json new file mode 100644 index 0000000000000000000000000000000000000000..b882201a850ce514c6036117935eee568a30ed9c --- /dev/null +++ b/etri-xainlp/llama2-ko-13b-instruct-v1/result_2023-10-30 03:35:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4087030716723549, + "acc_stderr": 0.014365750345427006, + "acc_norm": 0.4445392491467577, + "acc_norm_stderr": 
0.014521226405627072 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42053375821549493, + "acc_stderr": 0.00492635856449457, + "acc_norm": 0.5438159729137622, + "acc_norm_stderr": 0.004970585328297623 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5031928480204342, + "acc_stderr": 0.017879598945933085, + "acc_norm": 0.5031928480204342, + "acc_norm_stderr": 0.017879598945933085 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764187, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764187 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836928, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.030351527323344944, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344944 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.02413015829976262, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.02413015829976262 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282532 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5247706422018349, + "acc_stderr": 0.021410999753635914, + "acc_norm": 0.5247706422018349, + "acc_norm_stderr": 0.021410999753635914 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225875, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225875 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.01965992249362335, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.01965992249362335 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952683, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952683 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553974, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553974 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.02928941340940319, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.02928941340940319 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741518, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.4909090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.44031892549959717, + "mc2_stderr": 0.015641862520853814 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5324675324675324, + "acc_norm_stderr": 0.017154073716682868 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama2-ko-13b-instruct-v1", + "model_sha": "79d4bd9490cf7cc0015f950aeed3e5798c662ea2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/etri-xainlp/llama2-ko-13b-instruct/result_2023-10-06 10:52:22.json b/etri-xainlp/llama2-ko-13b-instruct/result_2023-10-06 10:52:22.json new file mode 100644 index 0000000000000000000000000000000000000000..8952d6dbabbb4513b503253312c9a9dd4467b4f1 --- /dev/null +++ b/etri-xainlp/llama2-ko-13b-instruct/result_2023-10-06 10:52:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4087030716723549, + "acc_stderr": 0.014365750345427006, + "acc_norm": 0.44795221843003413, + "acc_norm_stderr": 0.01453201149821167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4176458872734515, + "acc_stderr": 0.00492163264510238, + "acc_norm": 0.5456084445329615, + "acc_norm_stderr": 0.004968979259738337 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041982, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.02836504154256457, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.02836504154256457 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + 
"acc_stderr": 0.025007329882461227, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.02413015829976262, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.02413015829976262 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43641618497109824, + "acc_stderr": 0.026700545424943684, + "acc_norm": 0.43641618497109824, + "acc_norm_stderr": 0.026700545424943684 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.027272582849839796, + "acc_norm": 0.4012345679012346, + 
"acc_norm_stderr": 0.027272582849839796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.01948802574552967, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.01948802574552967 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190714, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125474, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125474 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.459915611814346, + "acc_stderr": 0.03244246810187914, + "acc_norm": 0.459915611814346, + 
"acc_norm_stderr": 0.03244246810187914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.011849234291459315, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.011849234291459315 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024623, + "mc2": 0.4417936176466885, + "mc2_stderr": 0.015776414620892073 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.01690006287942712, + "acc_norm": 0.4592680047225502, + "acc_norm_stderr": 0.01713321827653767 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama2-ko-13b-instruct", + "model_sha": "5be30496ddc86d18eff1df9aab04e5c246fb2d86", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/polyglot-ko-12.8b-instruct/result_2023-10-05 00:56:42.json b/etri-xainlp/polyglot-ko-12.8b-instruct/result_2023-10-05 00:56:42.json new file mode 100644 index 0000000000000000000000000000000000000000..03f20af68dcbe26a6b13c81010a7bd34e33eaa20 --- /dev/null +++ b/etri-xainlp/polyglot-ko-12.8b-instruct/result_2023-10-05 00:56:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31143344709897613, + "acc_stderr": 0.013532472099850947, + "acc_norm": 0.3464163822525597, + "acc_norm_stderr": 0.013905011180063247 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4026090420235013, + "acc_stderr": 0.0048942100113032105, + "acc_norm": 0.5198167695678152, + "acc_norm_stderr": 0.004985860853427639 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.040580420156460344, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.040580420156460344 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2822477650063857, + "acc_stderr": 0.01609530296987857, + "acc_norm": 0.2822477650063857, + "acc_norm_stderr": 0.01609530296987857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03820169914517905, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03820169914517905 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.02937917046412483, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.02937917046412483 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.21864951768488747, + "acc_stderr": 0.023475581417861113, + "acc_norm": 0.21864951768488747, + "acc_norm_stderr": 0.023475581417861113 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.26905829596412556, + "acc_stderr": 0.02976377940687498, + "acc_norm": 0.26905829596412556, + "acc_norm_stderr": 0.02976377940687498 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378947, + "acc_norm": 0.3103448275862069, 
+ "acc_norm_stderr": 0.03855289616378947 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868956, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.021606294494647727, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.021606294494647727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094632, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094632 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.029678333141444465, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.029678333141444465 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.29914529914529914, + "acc_stderr": 0.029996951858349476, + "acc_norm": 0.29914529914529914, + "acc_norm_stderr": 0.029996951858349476 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.02761116340239972, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.02761116340239972 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275794, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275794 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804723, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804723 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.02970528405677244, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.02970528405677244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3092485549132948, + "acc_stderr": 0.02488314057007176, + "acc_norm": 0.3092485549132948, + "acc_norm_stderr": 0.02488314057007176 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.025251173936495026, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.025251173936495026 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.02977866303775296, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.02977866303775296 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.20917431192660552, + "acc_stderr": 0.01743793717334323, + "acc_norm": 0.20917431192660552, + "acc_norm_stderr": 0.01743793717334323 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.040261875275912046, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.040261875275912046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013317, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013317 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.017917974069594726, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.017917974069594726 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872416, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872416 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03246887243637649, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03246887243637649 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.02736586113151381, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.02736586113151381 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.025409301953225678, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.025409301953225678 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22362869198312235, + "acc_stderr": 0.027123298205229972, + "acc_norm": 0.22362869198312235, + "acc_norm_stderr": 0.027123298205229972 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26401564537157757, + "acc_stderr": 0.011258435537723812, + "acc_norm": 0.26401564537157757, + "acc_norm_stderr": 0.011258435537723812 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083291, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083291 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156472, + "mc2": 0.4202272328082401, + "mc2_stderr": 0.016142378134497877 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30342384887839435, + "acc_stderr": 0.01580607271790957, + "acc_norm": 0.3447461629279811, + "acc_norm_stderr": 0.01634064990541869 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/polyglot-ko-12.8b-instruct", + "model_sha": "ec0113994052a77ef4741cf14d7a9af887b2e1d5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/fiveflow/koMistral-v0.1-neftune/result_2023-11-20 14:32:29.json b/fiveflow/koMistral-v0.1-neftune/result_2023-11-20 14:32:29.json new file mode 100644 index 0000000000000000000000000000000000000000..1430ec9a13b6fb3dbffe901f1481d5c66fd9653b --- /dev/null +++ b/fiveflow/koMistral-v0.1-neftune/result_2023-11-20 14:32:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1885665529010239, + "acc_stderr": 0.011430897647675832, + "acc_norm": 0.23208191126279865, + "acc_norm_stderr": 0.012336718284948856 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2704640509858594, + "acc_stderr": 0.004432917403755054, + "acc_norm": 0.28589922326229833, + "acc_norm_stderr": 0.004509181919322858 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3567251461988304, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.3567251461988304, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.03675668832233188, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.03675668832233188 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26181353767560667, + "acc_stderr": 0.015720838678445273, + "acc_norm": 0.26181353767560667, + "acc_norm_stderr": 0.015720838678445273 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073465, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.03633384414073465 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2170212765957447, + "acc_stderr": 0.026947483121496238, + "acc_norm": 0.2170212765957447, + "acc_norm_stderr": 0.026947483121496238 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2057877813504823, + "acc_stderr": 0.022961339906764244, + "acc_norm": 0.2057877813504823, + "acc_norm_stderr": 0.022961339906764244 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.03021683101150876, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.03021683101150876 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + 
"acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.026265024608275882, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.026265024608275882 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371383, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371383 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.031089826002937523, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.031089826002937523 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2161290322580645, + "acc_stderr": 0.02341529343356852, + "acc_norm": 0.2161290322580645, + "acc_norm_stderr": 0.02341529343356852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727756, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727756 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.21851851851851853, + "acc_stderr": 0.025195752251823796, + "acc_norm": 0.21851851851851853, + "acc_norm_stderr": 0.025195752251823796 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198816, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198816 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21693121693121692, + "acc_stderr": 0.02122708244944504, + "acc_norm": 
0.21693121693121692, + "acc_norm_stderr": 0.02122708244944504 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2147239263803681, + "acc_stderr": 0.032262193772867744, + "acc_norm": 0.2147239263803681, + "acc_norm_stderr": 0.032262193772867744 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02313237623454334, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02313237623454334 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19170984455958548, + "acc_stderr": 0.028408953626245258, + "acc_norm": 0.19170984455958548, + "acc_norm_stderr": 0.028408953626245258 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1908256880733945, + "acc_stderr": 0.016847676400091105, + "acc_norm": 0.1908256880733945, + "acc_norm_stderr": 0.016847676400091105 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.038522733649243156, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.038522733649243156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02380518652488814, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02380518652488814 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032501, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032501 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19117647058823528, + "acc_stderr": 0.023886881922440355, + "acc_norm": 0.19117647058823528, + "acc_norm_stderr": 0.023886881922440355 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.01100597139992723, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.01100597139992723 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087304, + "mc2": 0.46232982151436586, + "mc2_stderr": 0.01648243139543783 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.20425029515938606, + "acc_stderr": 0.013860675878176822, + "acc_norm": 0.4014167650531287, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "fiveflow/koMistral-v0.1-neftune", + "model_sha": "44a5ba8db203f2982dfcb5c416a45c5b737b6898", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/fiveflow/kolong-llama-v0.1/result_2023-10-10 02:31:12.json b/fiveflow/kolong-llama-v0.1/result_2023-10-10 02:31:12.json new file mode 100644 index 0000000000000000000000000000000000000000..4d721ac9f7d617205d4c42edb7685e398502c733 --- /dev/null +++ b/fiveflow/kolong-llama-v0.1/result_2023-10-10 02:31:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27474402730375425, + "acc_stderr": 0.013044617212771227, + "acc_norm": 0.32081911262798635, + "acc_norm_stderr": 0.013640943091946526 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35660227046405096, + "acc_stderr": 0.0047801698733328435, + "acc_norm": 0.45717984465245964, + "acc_norm_stderr": 0.004971449552787173 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.035650796707083106, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.035650796707083106 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.30268199233716475, + "acc_stderr": 0.016428781581749367, + "acc_norm": 0.30268199233716475, + "acc_norm_stderr": 0.016428781581749367 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939101, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939101 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.028957342788342343, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.028957342788342343 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663925, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663925 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3183279742765273, + "acc_stderr": 
0.026457225067811018, + "acc_norm": 0.3183279742765273, + "acc_norm_stderr": 0.026457225067811018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291954, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291954 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.03289477330098614, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.03289477330098614 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.03664666337225256, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.03664666337225256 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380558, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380558 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.021278393863586282, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.021278393863586282 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243838, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243838 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22167487684729065, + "acc_stderr": 0.029225575892489607, + "acc_norm": 0.22167487684729065, + "acc_norm_stderr": 0.029225575892489607 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.02518900666021238, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.02518900666021238 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.29914529914529914, + "acc_stderr": 0.029996951858349476, + "acc_norm": 0.29914529914529914, + "acc_norm_stderr": 0.029996951858349476 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.028254200344438665, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.028254200344438665 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.044942908662520896, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.044942908662520896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 
0.03115715086935555, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935555 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.02454761779480383, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.02454761779480383 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.024569223600460845, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.024569223600460845 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.031618779179354094, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.031618779179354094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29541284403669726, + "acc_stderr": 0.019560619182976, + "acc_norm": 0.29541284403669726, + "acc_norm_stderr": 0.019560619182976 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.034550710191021475, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.034550710191021475 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.026716118380156837, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.026716118380156837 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.19834710743801653, + "acc_stderr": 0.03640118271990945, + "acc_norm": 0.19834710743801653, + "acc_norm_stderr": 0.03640118271990945 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.034597776068105365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.034597776068105365 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.018185218954318075, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.018185218954318075 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 
0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372944, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372944 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409163, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409163 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411127, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411127 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.288135593220339, + "acc_stderr": 0.011567140661324561, + "acc_norm": 0.288135593220339, + "acc_norm_stderr": 0.011567140661324561 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.031822318676475544, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.031822318676475544 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.21542227662178703, + "mc1_stderr": 0.01439190265242768, + "mc2": 0.37745653236553117, + "mc2_stderr": 0.015551417113340219 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.17355371900826447, + "acc_stderr": 0.013020842794398262, + "acc_norm": 0.2408500590318772, + "acc_norm_stderr": 0.014701172662583915 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "fiveflow/kolong-llama-v0.1", + "model_sha": "e9ed499df932c04d7d3106603136f469c2f57aaa", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge/result_2023-11-01 15:54:56.json b/gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge/result_2023-11-01 15:54:56.json new file mode 100644 index 0000000000000000000000000000000000000000..1d545d86f2f06f459fe47f6f2b6679fb2a9969c2 --- /dev/null +++ b/gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge/result_2023-11-01 15:54:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955005, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256512 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4130651264688309, + "acc_stderr": 0.0049137803474988756, + "acc_norm": 0.5571599283011353, + "acc_norm_stderr": 0.004957068377516513 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49680715197956576, + "acc_stderr": 0.017879598945933065, + "acc_norm": 0.49680715197956576, + "acc_norm_stderr": 0.017879598945933065 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 
0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138621, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138621 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.03175367846096625, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.03175367846096625 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280458, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.028040981380761547, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.028040981380761547 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.030102793781791194, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.030102793781791194 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137288, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137288 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.03567603799639172, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.03567603799639172 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643895, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643895 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.0276847214156562, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.0276847214156562 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254889, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254889 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924317, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924317 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110307, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110307 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 
0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022135 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791033, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791033 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.031001209039894836, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.031001209039894836 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28683181225554105, + "acc_stderr": 0.011551504781176933, + "acc_norm": 0.28683181225554105, + "acc_norm_stderr": 0.011551504781176933 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.43540541386680215, + "mc2_stderr": 0.015086654503820634 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43683589138134593, + "acc_stderr": 0.017052633559856076, + "acc_norm": 0.5324675324675324, + "acc_norm_stderr": 0.017154073716682868 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge", + "model_sha": "b38c6d07fbdb2119f7c1ee28c1a764c305547aec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge/result_2023-11-01 16:20:48.json b/gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge/result_2023-11-01 16:20:48.json new file mode 100644 index 0000000000000000000000000000000000000000..1d545d86f2f06f459fe47f6f2b6679fb2a9969c2 --- /dev/null +++ b/gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge/result_2023-11-01 16:20:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955005, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256512 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4130651264688309, + "acc_stderr": 0.0049137803474988756, + "acc_norm": 0.5571599283011353, + "acc_norm_stderr": 0.004957068377516513 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 
0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49680715197956576, + "acc_stderr": 0.017879598945933065, + "acc_norm": 0.49680715197956576, + "acc_norm_stderr": 0.017879598945933065 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138621, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138621 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.03175367846096625, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.03175367846096625 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280458, + "acc_norm": 0.3448275862068966, + 
"acc_norm_stderr": 0.03344283744280458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.028040981380761547, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.028040981380761547 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.030102793781791194, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.030102793781791194 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137288, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137288 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.03567603799639172, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.03567603799639172 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643895, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643895 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.0276847214156562, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.0276847214156562 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254889, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254889 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924317, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924317 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110307, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110307 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022135 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791033, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791033 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.031001209039894836, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.031001209039894836 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28683181225554105, + "acc_stderr": 0.011551504781176933, + "acc_norm": 0.28683181225554105, + "acc_norm_stderr": 0.011551504781176933 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.43540541386680215, + "mc2_stderr": 0.015086654503820634 + }, + "harness|ko_commongen_v2|2": { + "acc": 
0.43683589138134593, + "acc_stderr": 0.017052633559856076, + "acc_norm": 0.5324675324675324, + "acc_norm_stderr": 0.017154073716682868 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge", + "model_sha": "b38c6d07fbdb2119f7c1ee28c1a764c305547aec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/garage-bAInd/Platypus2-13B/result_2023-10-10 06:53:53.json b/garage-bAInd/Platypus2-13B/result_2023-10-10 06:53:53.json new file mode 100644 index 0000000000000000000000000000000000000000..e3c979e57da9a5bd8e4ec02a09a9026861529917 --- /dev/null +++ b/garage-bAInd/Platypus2-13B/result_2023-10-10 06:53:53.json @@ -0,0 +1,444 @@ +{ + "results": 
{ + "harness|ko_arc_challenge|25": { + "acc": 0.3370307167235495, + "acc_stderr": 0.01381347665290228, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349822 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3623780123481378, + "acc_stderr": 0.004797048154893968, + "acc_norm": 0.46883091017725553, + "acc_norm_stderr": 0.00498007670739243 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.017874698667491355, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.017874698667491355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339525, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339525 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617749, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617749 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296546, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296546 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 
0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115215, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115215 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608466, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608466 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952166, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952166 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379417, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379417 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.03600244069867178, + 
"acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680804, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.04537935177947879, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.04537935177947879 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.019469518221573702, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.019469518221573702 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882601, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882601 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125468, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125468 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163907, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163907 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228568, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228568 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 
0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.0156596057553269, + "mc2": 0.44247428746712286, + "mc2_stderr": 0.015350644205547385 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3612750885478158, + "acc_stderr": 0.016515463022412014, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.016876941165045612 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "garage-bAInd/Platypus2-13B", + "model_sha": "0a474bc0e76203528db789f027f4d6cce2727cce", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/garage-bAInd/Platypus2-7B/result_2023-10-10 06:53:10.json b/garage-bAInd/Platypus2-7B/result_2023-10-10 06:53:10.json new file mode 100644 index 0000000000000000000000000000000000000000..4f841716e826c07b66d0fdc31141e728ae3e3936 --- /dev/null +++ b/garage-bAInd/Platypus2-7B/result_2023-10-10 06:53:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537365, + "acc_norm": 0.3242320819112628, + "acc_norm_stderr": 0.013678810399518822 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3348934475204143, + "acc_stderr": 0.004709886644157085, + "acc_norm": 0.4153555068711412, + "acc_norm_stderr": 0.0049177611817401625 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41762452107279696, + "acc_stderr": 0.01763563732695152, + "acc_norm": 0.41762452107279696, + "acc_norm_stderr": 0.01763563732695152 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742399, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.02951319662553935, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.02951319662553935 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.03550920185689629, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.03550920185689629 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751475, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.027648149599751475 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.032443052830087304 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.033832012232444426, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.033832012232444426 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 
0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.02311936275823229, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.02311936275823229 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.027379871229943238, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.027379871229943238 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5512820512820513, + "acc_stderr": 0.032583346493868806, + "acc_norm": 0.5512820512820513, + "acc_norm_stderr": 0.032583346493868806 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3132075471698113, + "acc_stderr": 0.02854479331905533, + "acc_norm": 0.3132075471698113, + "acc_norm_stderr": 0.02854479331905533 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425464, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425464 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507383, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507383 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4626865671641791, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.4626865671641791, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.03533133389323657, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.03533133389323657 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.026424816594009845, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.026424816594009845 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.38580246913580246, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.38580246913580246, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3559633027522936, + "acc_stderr": 0.020528559278244218, + "acc_norm": 0.3559633027522936, + "acc_norm_stderr": 0.020528559278244218 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.026925654653615686, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.026925654653615686 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.01941253924203216, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.01941253924203216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.0278079901413202, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.0278079901413202 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953195, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953195 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.01437816988409842, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.01437816988409842 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.02866685779027465, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.02866685779027465 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.46835443037974683, + "acc_stderr": 0.03248197400511075, + "acc_norm": 0.46835443037974683, + "acc_norm_stderr": 0.03248197400511075 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.011862561755715923, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.011862561755715923 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.01565960575532691, + "mc2": 0.4571739435072619, + "mc2_stderr": 0.015454282704862585 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28689492325855964, + "acc_stderr": 0.015550809966781778, + "acc_norm": 0.34946871310507677, + "acc_norm_stderr": 0.016392797085769854 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "garage-bAInd/Platypus2-7B", + "model_sha": "c27aff7201e611f301c0e19f351cbe74b1a9f1f1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/eclectus1.1/result_2023-12-26 02:23:09.json b/genne/eclectus1.1/result_2023-12-26 02:23:09.json new file mode 100644 index 0000000000000000000000000000000000000000..5d34b77a93672390b184673d2646f3603cfcff05 --- /dev/null +++ b/genne/eclectus1.1/result_2023-12-26 02:23:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3583617747440273, + "acc_stderr": 0.014012883334859862, + "acc_norm": 0.3924914675767918, + "acc_norm_stderr": 0.014269634635670712 + }, + "harness|ko_hellaswag|10": { + "acc": 0.429097789285003, + "acc_stderr": 0.00493935814556132, + "acc_norm": 0.5464050985859391, + "acc_norm_stderr": 0.004968244611429389 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.049486373240266356, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.049486373240266356 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5747126436781609, + "acc_stderr": 0.017679225489431457, + "acc_norm": 0.5747126436781609, + "acc_norm_stderr": 0.017679225489431457 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788682, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788682 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.028173917761762906, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.028173917761762906 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + 
"acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149354, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149354 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.02523038123893483, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.02523038123893483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507748, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507748 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342665, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + 
"acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5493827160493827, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.5493827160493827, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.021393071222680818, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.021393071222680818 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061177, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175364, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38005215123859193, + "acc_stderr": 0.012397328205137803, + "acc_norm": 0.38005215123859193, + "acc_norm_stderr": 0.012397328205137803 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635896, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635896 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520705, + "mc2": 0.41502275168372754, + "mc2_stderr": 0.016098527627583504 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3187721369539551, + "acc_stderr": 0.016021427055309574, + "acc_norm": 0.33530106257378983, + "acc_norm_stderr": 0.01623098123298982 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/eclectus1.1", + "model_sha": "8afd6163d1bd3f27576f4879c2aac1b67771654f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/eclectus_1.1_dedup/result_2023-12-27 23:59:41.json b/genne/eclectus_1.1_dedup/result_2023-12-27 23:59:41.json new file mode 100644 index 0000000000000000000000000000000000000000..9329c1c498d30ec88261848288e5f80ea926b35a --- /dev/null +++ b/genne/eclectus_1.1_dedup/result_2023-12-27 23:59:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44112627986348124, + "acc_stderr": 0.014509747749064663, + "acc_norm": 0.4812286689419795, + "acc_norm_stderr": 0.014601090150633964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4441346345349532, + "acc_stderr": 0.004958537988993583, + "acc_norm": 0.5880302728540131, + "acc_norm_stderr": 0.004911837730582199 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.048979577377811674, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.048979577377811674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5402298850574713, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.5402298850574713, + "acc_norm_stderr": 0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 
0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846475, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846475 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674078, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.03070948699255655, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.03070948699255655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + 
"acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756646, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756646 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.027648477877413324, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.027648477877413324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008587, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008587 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6036697247706422, + "acc_stderr": 0.02097146994790053, + "acc_norm": 0.6036697247706422, + "acc_norm_stderr": 0.02097146994790053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604673, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604673 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281525, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281525 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833587, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261462, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 
0.014444157808261462 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.02997280717046463, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.02997280717046463 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763128, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.032335327775334835, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3376792698826597, + "acc_stderr": 0.01207856377714555, + "acc_norm": 0.3376792698826597, + "acc_norm_stderr": 0.01207856377714555 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070263, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070263 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775527, + "mc2": 0.4309522207775758, + "mc2_stderr": 0.01552456574847187 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42621015348288077, + "acc_stderr": 0.01700212260948926, + "acc_norm": 0.4734356552538371, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/eclectus_1.1_dedup", + "model_sha": "799c6defbb95129a59d3b6f1d363c6a7b908ba0e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/eclectus_7b_1.1/result_2023-12-26 23:43:05.json b/genne/eclectus_7b_1.1/result_2023-12-26 23:43:05.json new file mode 100644 index 0000000000000000000000000000000000000000..e0b0c6d6918092cf2cf28bb5a111d22721e79a4b --- /dev/null +++ b/genne/eclectus_7b_1.1/result_2023-12-26 23:43:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38822525597269625, + "acc_stderr": 0.014241614207414047, + "acc_norm": 0.44197952218430037, + "acc_norm_stderr": 0.014512682523128343 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4113722366062537, + "acc_stderr": 0.004910767540867421, + "acc_norm": 0.5376419040031866, + "acc_norm_stderr": 0.004975621147406101 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041986, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041986 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596241, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596241 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.027917050748484627, + "acc_norm": 0.40836012861736337, + "acc_norm_stderr": 0.027917050748484627 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.03163145807552379, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.03163145807552379 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.02483881198803316, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02483881198803316 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.027666182075539635, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.027666182075539635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5641025641025641, + "acc_stderr": 0.032485775115783995, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.032485775115783995 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119996, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119996 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4527363184079602, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.4527363184079602, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.40173410404624277, + "acc_stderr": 0.026394104177643634, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.02716368603827123, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.02716368603827123 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43302752293577984, + "acc_stderr": 0.021244146569074345, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.021244146569074345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.028408302020332694, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.028408302020332694 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.045454545454545484, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.045454545454545484 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849723, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849723 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.019469518221573695, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.019469518221573695 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + 
"acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103986, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103986 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.027971541370170598, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.027971541370170598 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 0.011808598262503318, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.01520152224629994, + "mc2": 0.40284336981653474, + "mc2_stderr": 0.015378724374521922 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3010625737898465, + "acc_stderr": 0.015771113299945454, + "acc_norm": 0.3364817001180638, + "acc_norm_stderr": 0.01624508529438656 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/eclectus_7b_1.1", + "model_sha": "199e7a5119dd83ff2ca1a34dfb681bc89842517c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/electus_yiko_dpo/result_2023-12-27 23:34:02.json b/genne/electus_yiko_dpo/result_2023-12-27 23:34:02.json new file mode 100644 index 0000000000000000000000000000000000000000..27ecceb67e3493541ca97b3eab687340d26ac41c --- /dev/null +++ b/genne/electus_yiko_dpo/result_2023-12-27 23:34:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.013847460518892973, + "acc_norm": 0.3924914675767918, + "acc_norm_stderr": 0.014269634635670709 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3815972913762199, + "acc_stderr": 0.004847857546957471, + "acc_norm": 0.5190201155148377, + "acc_norm_stderr": 0.0049861698499463055 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4763729246487867, + "acc_stderr": 0.01785998976517645, + "acc_norm": 0.4763729246487867, + "acc_norm_stderr": 0.01785998976517645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + 
"acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3954983922829582, + "acc_stderr": 0.027770918531427838, + "acc_norm": 0.3954983922829582, + "acc_norm_stderr": 0.027770918531427838 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330313, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330313 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.382051282051282, + "acc_stderr": 0.024635549163908227, + "acc_norm": 0.382051282051282, + "acc_norm_stderr": 0.024635549163908227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.046166311118017125, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.046166311118017125 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.03282649385304151, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.03282649385304151 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3774193548387097, + "acc_stderr": 0.027575960723278246, + "acc_norm": 0.3774193548387097, + "acc_norm_stderr": 0.027575960723278246 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.47863247863247865, + "acc_stderr": 0.032726164476349545, + "acc_norm": 0.47863247863247865, + "acc_norm_stderr": 0.032726164476349545 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3660377358490566, + "acc_stderr": 0.02964781353936525, + "acc_norm": 0.3660377358490566, + "acc_norm_stderr": 0.02964781353936525 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + 
"acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4527363184079602, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.4527363184079602, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048488, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.02339382650048488 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46238532110091746, + "acc_stderr": 0.02137657527439758, + "acc_norm": 0.46238532110091746, + "acc_norm_stderr": 0.02137657527439758 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159624, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.027582811415159624 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355442, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861131, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861131 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329882, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329882 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.028666857790274645, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.028666857790274645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.510548523206751, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.510548523206751, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27444589308996087, + "acc_stderr": 0.011397043163078154, + "acc_norm": 0.27444589308996087, + "acc_norm_stderr": 0.011397043163078154 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.03804913653971011, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.03804913653971011 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.4351917744597652, + "mc2_stderr": 0.015624548959574044 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32113341204250295, + "acc_stderr": 0.016052762579111576, + "acc_norm": 0.3624557260920897, + "acc_norm_stderr": 0.016527131240453713 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/electus_yiko_dpo", + "model_sha": "e6917de3d9ace4fd400e4a463ca8c1605ac613c1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/kiwi_solar_merge_slerp/result_2023-12-28 04:04:24.json b/genne/kiwi_solar_merge_slerp/result_2023-12-28 04:04:24.json new file mode 100644 index 0000000000000000000000000000000000000000..649379aa8346b82a57d93304940fdea072a58976 --- /dev/null +++ b/genne/kiwi_solar_merge_slerp/result_2023-12-28 04:04:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4044368600682594, + "acc_stderr": 0.014342036483436177, + "acc_norm": 0.4880546075085324, + "acc_norm_stderr": 0.014607220340597171 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41874128659629556, + "acc_stderr": 0.004923445627861518, + "acc_norm": 0.5595498904600678, + "acc_norm_stderr": 0.0049542655953734565 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6462324393358876, + "acc_stderr": 0.017098184708161896, + 
"acc_norm": 0.6462324393358876, + "acc_norm_stderr": 0.017098184708161896 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368878, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368878 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936338, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936338 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.027731258647012, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 0.027731258647012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465918, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465918 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.031282177063684614, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.031282177063684614 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5923076923076923, + "acc_stderr": 0.024915243985987857, + "acc_norm": 0.5923076923076923, + "acc_norm_stderr": 0.024915243985987857 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6096774193548387, + "acc_stderr": 0.027751256636969583, + "acc_norm": 0.6096774193548387, + "acc_norm_stderr": 0.027751256636969583 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700914, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700914 + }, + "harness|ko_mmlu_clinical_knowledge|5": { 
+ "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.02555992055053101, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.02555992055053101 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6213872832369942, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.6213872832369942, + "acc_norm_stderr": 0.02611374936131034 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5895061728395061, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.5895061728395061, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041154, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6752293577981652, + "acc_stderr": 0.02007772910931033, + "acc_norm": 0.6752293577981652, + "acc_norm_stderr": 0.02007772910931033 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 
0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.020220920829626902, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.020220920829626902 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.02949482760014437, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.02949482760014437 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210744, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210744 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.031001209039894843, + "acc_norm": 0.6244897959183674, + "acc_norm_stderr": 0.031001209039894843 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.028304657943035307, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.028304657943035307 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41916558018252936, + "acc_stderr": 0.012602244505788222, + "acc_norm": 0.41916558018252936, + "acc_norm_stderr": 0.012602244505788222 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.696078431372549, + "acc_stderr": 0.032282103870378914, + "acc_norm": 0.696078431372549, + "acc_norm_stderr": 0.032282103870378914 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32068543451652387, + "mc1_stderr": 0.0163391703732809, + "mc2": 0.49040076769077373, + "mc2_stderr": 0.016166130098772675 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5608028335301063, + "acc_stderr": 0.017062775744780705, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972205 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/kiwi_solar_merge_slerp", + "model_sha": "223abb74d9d41d83e6c39d8ceedf86d3e270b6e2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/kiwi_solar_merge_ties/result_2023-12-28 05:58:45.json b/genne/kiwi_solar_merge_ties/result_2023-12-28 05:58:45.json new file mode 100644 index 0000000000000000000000000000000000000000..c62a0ead703c9f5161d0a9c69166563203c8308e --- /dev/null +++ b/genne/kiwi_solar_merge_ties/result_2023-12-28 05:58:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4044368600682594, + "acc_stderr": 0.014342036483436177, + "acc_norm": 0.4880546075085324, + "acc_norm_stderr": 0.014607220340597171 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41874128659629556, + "acc_stderr": 0.004923445627861518, + "acc_norm": 0.5596494722166899, + "acc_norm_stderr": 0.004954146286513353 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6462324393358876, + "acc_stderr": 0.017098184708161896, + "acc_norm": 0.6462324393358876, + "acc_norm_stderr": 0.017098184708161896 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368878, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368878 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936338, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936338 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.027731258647012, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 0.027731258647012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465918, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465918 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.031282177063684614, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.031282177063684614 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5923076923076923, + "acc_stderr": 0.024915243985987857, + "acc_norm": 0.5923076923076923, + "acc_norm_stderr": 0.024915243985987857 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 
0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6096774193548387, + "acc_stderr": 0.027751256636969583, + "acc_norm": 0.6096774193548387, + "acc_norm_stderr": 0.027751256636969583 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700914, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700914 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.02555992055053101, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.02555992055053101 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6213872832369942, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.6213872832369942, + "acc_norm_stderr": 0.02611374936131034 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5895061728395061, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.5895061728395061, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041154, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6752293577981652, + "acc_stderr": 0.02007772910931033, + "acc_norm": 0.6752293577981652, + "acc_norm_stderr": 0.02007772910931033 + }, 
+ "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.020220920829626902, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.020220920829626902 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.02949482760014437, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.02949482760014437 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210744, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210744 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.031001209039894843, + "acc_norm": 0.6244897959183674, + "acc_norm_stderr": 0.031001209039894843 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.028304657943035307, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.028304657943035307 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41916558018252936, + "acc_stderr": 0.012602244505788222, + "acc_norm": 0.41916558018252936, + "acc_norm_stderr": 0.012602244505788222 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.696078431372549, + "acc_stderr": 0.032282103870378914, + "acc_norm": 0.696078431372549, + "acc_norm_stderr": 0.032282103870378914 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32068543451652387, + "mc1_stderr": 0.0163391703732809, + "mc2": 0.49042655611434866, + "mc2_stderr": 0.016165929101778585 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.5608028335301063, + "acc_stderr": 0.017062775744780705, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972205 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/kiwi_solar_merge_ties", + "model_sha": "6112b7b551288ec773639693987d950a26f035db", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter2.1/result_2023-11-10 01:58:53.json b/genne/otter2.1/result_2023-11-10 01:58:53.json new file mode 100644 index 0000000000000000000000000000000000000000..db3d66ac799f279bbe938836fe67892ce6c3b103 --- /dev/null +++ b/genne/otter2.1/result_2023-11-10 01:58:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { 
+ "acc": 0.30119453924914674, + "acc_stderr": 0.013406741767847619, + "acc_norm": 0.34982935153583616, + "acc_norm_stderr": 0.013936809212158285 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3547102170882294, + "acc_stderr": 0.004774476498238616, + "acc_norm": 0.4565823541127266, + "acc_norm_stderr": 0.0049709334202319285 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3742690058479532, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.3742690058479532, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.35759897828863346, + "acc_stderr": 0.017139488998803284, + "acc_norm": 0.35759897828863346, + "acc_norm_stderr": 0.017139488998803284 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.029771642712491227, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.029771642712491227 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288086, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288086 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 0.026730620728004917, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004917 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.03427308652999934, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.03427308652999934 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342863, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342863 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + 
"acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553883, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553883 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3418803418803419, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.3418803418803419, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443866, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443866 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959912, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959912 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473834, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473834 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31343283582089554, + "acc_stderr": 0.032801882053486414, + "acc_norm": 0.31343283582089554, + "acc_norm_stderr": 0.032801882053486414 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.033450369167889904, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.033450369167889904 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106135, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826371, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826371 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.024946792225272314, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.024946792225272314 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.0360251131880677, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.0360251131880677 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909895, + "acc_norm": 
0.23834196891191708, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30091743119266057, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.30091743119266057, + "acc_norm_stderr": 0.019664751366802114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.027057974624494382, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.027057974624494382 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03583496176361061, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03583496176361061 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28594771241830064, + "acc_stderr": 0.01828048507295467, + "acc_norm": 0.28594771241830064, + "acc_norm_stderr": 0.01828048507295467 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.02668456434046099, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.02668456434046099 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605607, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605607 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468636, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468636 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254177, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254177 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.02599111767281329, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.02599111767281329 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.030964810588786716, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.030964810588786716 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27640156453715775, + "acc_stderr": 0.011422153194553582, + "acc_norm": 0.27640156453715775, + "acc_norm_stderr": 0.011422153194553582 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.03228210387037895, + 
"acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.03228210387037895 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03681050869161548, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03681050869161548 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.40833060750474154, + "mc2_stderr": 0.015549016246770386 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22668240850059032, + "acc_stderr": 0.014394701800505921, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.015453559655458278 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter2.1", + "model_sha": "8f65311a3b97d2625b09d8d9cba853e791a52faf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, 
+ "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.3.0/result_2023-11-10 02:56:11.json b/genne/otter3.1.3.0/result_2023-11-10 02:56:11.json new file mode 100644 index 0000000000000000000000000000000000000000..d297a8b33fdb230478afbfd18be05376e4949dfb --- /dev/null +++ b/genne/otter3.1.3.0/result_2023-11-10 02:56:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19880546075085323, + "acc_stderr": 0.011662850198175534, + "acc_norm": 0.25, + "acc_norm_stderr": 0.012653835621466646 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3110934076877116, + "acc_stderr": 0.004619948037222912, + "acc_norm": 0.3563035251941844, + "acc_norm_stderr": 0.004779276329704028 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + 
"acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 
0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501947, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501947 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.49959860935358086, + "mc2_stderr": 0.017034078108243818 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252247, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.014846044968252247 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.3.0", + "model_sha": "93f7d6bc33181cf6e444a0695e15c4a8859511d0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.3/result_2023-11-10 01:36:13.json b/genne/otter3.1.3/result_2023-11-10 01:36:13.json new file mode 100644 index 0000000000000000000000000000000000000000..4164e886b263742151e3a376a7ed8365de58aa23 --- /dev/null +++ b/genne/otter3.1.3/result_2023-11-10 01:36:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34215017064846415, + "acc_stderr": 0.01386415215917728, + "acc_norm": 0.40102389078498296, + "acc_norm_stderr": 0.014322255790719867 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3975303724357698, + "acc_stderr": 0.004883871774350596, + "acc_norm": 0.522903804023103, + "acc_norm_stderr": 0.004984543540932336 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3524904214559387, + "acc_stderr": 0.01708415024408137, + "acc_norm": 0.3524904214559387, + "acc_norm_stderr": 0.01708415024408137 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 0.026730620728004917, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004917 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.031353050095330834, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.031353050095330834 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185553, + "acc_norm": 
0.32413793103448274, + "acc_norm_stderr": 0.03900432069185553 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.037932811853078084, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.037932811853078084 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361266, + "acc_norm": 0.23949579831932774, + "acc_norm_stderr": 0.027722065493361266 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2358974358974359, + "acc_stderr": 0.021525965407408726, + "acc_norm": 0.2358974358974359, + "acc_norm_stderr": 0.021525965407408726 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293752, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.02606936229533513, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02606936229533513 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252089, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252089 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959312, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959312 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.030769444967296024, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.030769444967296024 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.022019080012217904, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.022019080012217904 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.03309615177059006, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.03309615177059006 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, 
+ "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33024691358024694, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.33024691358024694, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29015544041450775, + "acc_stderr": 0.03275264467791515, + "acc_norm": 0.29015544041450775, + "acc_norm_stderr": 0.03275264467791515 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30458715596330277, + "acc_stderr": 0.019732299420354038, + "acc_norm": 0.30458715596330277, + "acc_norm_stderr": 0.019732299420354038 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011745, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011745 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.025058503316958154, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.025058503316958154 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.018152871051538812, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.018152871051538812 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18055555555555555, + "acc_stderr": 0.02623287897149166, + "acc_norm": 0.18055555555555555, + "acc_norm_stderr": 0.02623287897149166 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22794117647058823, + "acc_stderr": 0.025483081468029804, + "acc_norm": 0.22794117647058823, + "acc_norm_stderr": 0.025483081468029804 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.026711430555538415, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.026711430555538415 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.029936696387138605, + "acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.029936696387138605 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29986962190352023, + "acc_stderr": 0.011702660860193975, + "acc_norm": 0.29986962190352023, + "acc_norm_stderr": 0.011702660860193975 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.03646204963253813, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.03646204963253813 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2215422276621787, + "mc1_stderr": 0.014537867601301139, + "mc2": 0.37108434422532566, + "mc2_stderr": 0.014734273077370515 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24321133412042503, + "acc_stderr": 0.014750068360453266, + "acc_norm": 0.3435655253837072, + "acc_norm_stderr": 0.016327334806429138 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.3", + "model_sha": "10886a4299e7060b16236abf0c743599af49ce1e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.4_7b/result_2023-11-12 23:54:42.json b/genne/otter3.1.4_7b/result_2023-11-12 23:54:42.json new file mode 100644 index 0000000000000000000000000000000000000000..3caeeb824d688956ac03fc11e10ca5627b9abfac --- /dev/null +++ b/genne/otter3.1.4_7b/result_2023-11-12 23:54:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2568259385665529, + "acc_stderr": 0.0127669237941168, + "acc_norm": 0.31569965870307165, + "acc_norm_stderr": 0.01358257109581529 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34554869547898825, + "acc_stderr": 0.0047457495387523176, + "acc_norm": 0.4391555467038439, + "acc_norm_stderr": 0.004952698802275645 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.36257309941520466, + "acc_stderr": 0.036871306155620606, + "acc_norm": 0.36257309941520466, + "acc_norm_stderr": 0.036871306155620606 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3767560664112388, + "acc_stderr": 0.01732829290730305, + "acc_norm": 0.3767560664112388, + "acc_norm_stderr": 0.01732829290730305 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.0357160923005348, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.0357160923005348 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.34726688102893893, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.34726688102893893, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.041184385658062976 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.029376616484945637, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.029376616484945637 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361245, + "acc_norm": 0.23949579831932774, + "acc_norm_stderr": 0.027722065493361245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.258974358974359, + "acc_stderr": 0.022211106810061675, + "acc_norm": 0.258974358974359, + "acc_norm_stderr": 0.022211106810061675 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617715, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029254, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029254 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36752136752136755, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.36752136752136755, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443866, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443866 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.024720713193952158, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.024720713193952158 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.032578473844367774, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.032578473844367774 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.36318407960199006, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.36318407960199006, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.022019080012217883, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 
0.022019080012217883 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.03309615177059006, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.03309615177059006 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30346820809248554, + "acc_stderr": 0.024752411960917205, + "acc_norm": 0.30346820809248554, + "acc_norm_stderr": 0.024752411960917205 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.025702640260603753, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.025702640260603753 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.031618779179354115, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.031618779179354115 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28440366972477066, + "acc_stderr": 0.019342036587702578, + "acc_norm": 0.28440366972477066, + "acc_norm_stderr": 0.019342036587702578 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.12698412698412698, + "acc_stderr": 0.02978041752268843, + "acc_norm": 0.12698412698412698, + "acc_norm_stderr": 0.02978041752268843 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.02664327847450875, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.02664327847450875 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03583496176361063, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03583496176361063 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3022875816993464, + "acc_stderr": 0.018579232711113877, + "acc_norm": 0.3022875816993464, + "acc_norm_stderr": 0.018579232711113877 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.026491914727355143, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.026491914727355143 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.02533684856333237, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.02533684856333237 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.031052391937584353, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.031052391937584353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2588005215123859, + "acc_stderr": 0.011186109046564611, + "acc_norm": 0.2588005215123859, + "acc_norm_stderr": 0.011186109046564611 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923393, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520688, + "mc2": 0.5016118824314271, + "mc2_stderr": 0.016997655364904746 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2762691853600944, + "acc_stderr": 0.015373387500464469, + "acc_norm": 0.36835891381345925, + "acc_norm_stderr": 0.016583858982639074 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.4_7b", + "model_sha": "40cfc23e4b84a0cd42d11320942985c48f75ed56", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.4n_7B/result_2023-11-12 23:45:56.json b/genne/otter3.1.4n_7B/result_2023-11-12 23:45:56.json new file mode 100644 index 0000000000000000000000000000000000000000..5ce235e363c49eb712ffd8cc50e3c6ae57aa37f5 --- /dev/null +++ b/genne/otter3.1.4n_7B/result_2023-11-12 23:45:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.01385583128749772, + "acc_norm": 0.4069965870307167, + "acc_norm_stderr": 0.01435639941800913 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39762995419239194, + "acc_stderr": 0.00488407975043389, + "acc_norm": 0.5259908384783908, + "acc_norm_stderr": 0.00498303542023571 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34355044699872284, + "acc_stderr": 0.016982145632652466, + "acc_norm": 0.34355044699872284, + "acc_norm_stderr": 0.016982145632652466 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.3729903536977492, + "acc_stderr": 0.027466610213140116, + "acc_norm": 0.3729903536977492, + "acc_norm_stderr": 0.027466610213140116 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34080717488789236, + "acc_stderr": 0.0318114974705536, + "acc_norm": 0.34080717488789236, + "acc_norm_stderr": 0.0318114974705536 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03274287914026868, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03274287914026868 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149353, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149353 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882385, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882385 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 0.02119363252514854, + "acc_norm": 0.22564102564102564, + "acc_norm_stderr": 0.02119363252514854 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.02815283794249386, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.02815283794249386 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 
0.03220024104534205, + "acc_norm": 0.2935323383084577, + "acc_norm_stderr": 0.03220024104534205 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2861271676300578, + "acc_stderr": 0.02433214677913413, + "acc_norm": 0.2861271676300578, + "acc_norm_stderr": 0.02433214677913413 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765134, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765134 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.031821550509166484, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.031821550509166484 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3247706422018349, + "acc_stderr": 0.02007772910931032, + "acc_norm": 0.3247706422018349, + "acc_norm_stderr": 0.02007772910931032 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4297520661157025, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590627, + "acc_norm": 
0.2695035460992908, + "acc_norm_stderr": 0.026469036818590627 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.0420327729146776, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.0420327729146776 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.028353212866863445, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.028353212866863445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.02503584522771126, + "acc_norm": 0.21691176470588236, + "acc_norm_stderr": 0.02503584522771126 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.02783302387139968, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.02783302387139968 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.37130801687763715, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.37130801687763715, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2966101694915254, + "acc_stderr": 0.011665946586082847, + "acc_norm": 0.2966101694915254, + "acc_norm_stderr": 0.011665946586082847 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.036462049632538115, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.036462049632538115 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731618, + "mc2": 0.386120854227026, + "mc2_stderr": 0.014932757143046258 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22668240850059032, + "acc_stderr": 0.014394701800505885, + "acc_norm": 0.33766233766233766, + "acc_norm_stderr": 0.016259075784754964 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.4n_7b", + "model_sha": "d4c3b71520aa665560a6ebc06068d3b94da09dc1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.4n_7B/result_2023-11-13 02:39:47.json b/genne/otter3.1.4n_7B/result_2023-11-13 02:39:47.json new file mode 100644 index 0000000000000000000000000000000000000000..fdb99f20c319c072d8816f43257d96eaba7c12bb --- /dev/null +++ b/genne/otter3.1.4n_7B/result_2023-11-13 02:39:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.01385583128749772, + "acc_norm": 0.4069965870307167, + "acc_norm_stderr": 0.01435639941800913 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39762995419239194, + "acc_stderr": 0.00488407975043389, + "acc_norm": 0.5258912567217686, + "acc_norm_stderr": 0.004983087049281747 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34227330779054915, + "acc_stderr": 0.016967031766413624, + "acc_norm": 0.34227330779054915, + "acc_norm_stderr": 0.016967031766413624 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 
0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3729903536977492, + "acc_stderr": 0.027466610213140116, + "acc_norm": 0.3729903536977492, + "acc_norm_stderr": 0.027466610213140116 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34080717488789236, + "acc_stderr": 0.0318114974705536, + "acc_norm": 0.34080717488789236, + "acc_norm_stderr": 0.0318114974705536 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03274287914026868, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03274287914026868 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149353, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149353 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882385, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882385 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 0.02119363252514854, + "acc_norm": 0.22564102564102564, + "acc_norm_stderr": 0.02119363252514854 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.02815283794249386, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.02815283794249386 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 0.03220024104534205, + "acc_norm": 0.2935323383084577, + "acc_norm_stderr": 0.03220024104534205 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.024405173935783234, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.024405173935783234 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765134, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765134 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.031821550509166484, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.031821550509166484 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3229357798165138, + "acc_stderr": 0.020048115923415325, + "acc_norm": 0.3229357798165138, + "acc_norm_stderr": 0.020048115923415325 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4297520661157025, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590627, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590627 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.0420327729146776, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.0420327729146776 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859676, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859676 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02518778666022726, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02518778666022726 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.027979823538744543, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.027979823538744543 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.37130801687763715, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.37130801687763715, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29791395045632335, + "acc_stderr": 0.011680717340400035, + "acc_norm": 0.29791395045632335, + "acc_norm_stderr": 0.011680717340400035 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.036462049632538115, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.036462049632538115 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731618, + "mc2": 0.38613822493621114, + "mc2_stderr": 0.014932812065930626 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22668240850059032, + "acc_stderr": 0.014394701800505885, + "acc_norm": 0.33766233766233766, + "acc_norm_stderr": 0.016259075784754964 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.4n_7B", + "model_sha": "d4c3b71520aa665560a6ebc06068d3b94da09dc1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.6n_13b/result_2023-11-20 00:19:47.json b/genne/otter3.1.6n_13b/result_2023-11-20 00:19:47.json new file mode 100644 index 0000000000000000000000000000000000000000..22834e63e5b47744a7e9b5d3fc509eb515f26157 --- /dev/null +++ b/genne/otter3.1.6n_13b/result_2023-11-20 00:19:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36689419795221845, + "acc_stderr": 0.014084133118104296, + "acc_norm": 0.4189419795221843, + "acc_norm_stderr": 0.01441810695363901 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39892451702848036, + "acc_stderr": 0.004886764243204049, + "acc_norm": 0.5270862378012349, + "acc_norm_stderr": 0.004982454383162064 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.04846748253977239, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.04846748253977239 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48659003831417624, + "acc_stderr": 0.017873531736510392, + "acc_norm": 0.48659003831417624, + "acc_norm_stderr": 0.017873531736510392 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400352, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400352 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956278, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956278 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4595959595959596, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.4595959595959596, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.03979236637497411, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.03979236637497411 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.024756000382130945, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.024756000382130945 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.02727389059430065, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.02727389059430065 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 
0.03217180182641086, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641086 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228416, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228416 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360383, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360383 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.035351400842767194, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.035351400842767194 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159665, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159665 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112126, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112126 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.430635838150289, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.430635838150289, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.03499807276193339, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.03499807276193339 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254889, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254889 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.027780141207023355, + "acc_norm": 0.3790849673202614, + 
"acc_norm_stderr": 0.027780141207023355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.045190820213197744, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.045190820213197744 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.01962744474841224, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.01962744474841224 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005357, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005357 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.02736586113151381, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.02736586113151381 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2985658409387223, + "acc_stderr": 0.01168806014179422, + "acc_norm": 0.2985658409387223, + "acc_norm_stderr": 0.01168806014179422 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707687, + "mc2": 0.40503923056987007, + "mc2_stderr": 0.01620658438032362 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31641086186540734, + "acc_stderr": 0.015989617951065474, + "acc_norm": 0.3754427390791027, + "acc_norm_stderr": 0.01664841158951109 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.6n_13b", + "model_sha": "3b9d64a18215b8ef074d0aa79ad73bf273193068", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.6n_13b_pre/result_2023-11-21 00:04:57.json b/genne/otter3.1.6n_13b_pre/result_2023-11-21 00:04:57.json new file mode 100644 index 0000000000000000000000000000000000000000..734b28d42bc4011f32b9196adfa9299c54d69753 --- /dev/null +++ b/genne/otter3.1.6n_13b_pre/result_2023-11-21 00:04:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4138225255972696, + "acc_stderr": 0.014392730009221007, + "acc_norm": 0.48293515358361777, + "acc_norm_stderr": 0.014602878388536598 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4402509460266879, + "acc_stderr": 0.004954026775425767, + "acc_norm": 0.5984863572993427, + "acc_norm_stderr": 0.004892026457294709 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.01787574884024241, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.01787574884024241 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.028397944907806612, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.028397944907806612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792399, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792399 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714506, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714506 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03196876989195778, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03196876989195778 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + 
"acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.0236369759961018, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.0236369759961018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5174311926605505, + "acc_stderr": 0.021424291871853147, + "acc_norm": 0.5174311926605505, + "acc_norm_stderr": 
0.021424291871853147 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523809, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523809 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618065, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618065 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.22685185185185186, + "acc_stderr": 0.028561650102422283, + "acc_norm": 0.22685185185185186, + "acc_norm_stderr": 0.028561650102422283 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406794, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406794 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.031722950043323296, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.011787910251664585, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.011787910251664585 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557961, + "mc2": 0.4153807034736448, + "mc2_stderr": 
0.015135333189478775 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3105076741440378, + "acc_stderr": 0.01590800452876201, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076568 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.6n_13b_pre", + "model_sha": "e9ec62fda916f9254d7fe6afe2f55404599a7fa1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gl2een/llama2-13b-instruct-full-fintune/result_2023-11-24 03:45:54.json b/gl2een/llama2-13b-instruct-full-fintune/result_2023-11-24 03:45:54.json new file mode 100644 index 0000000000000000000000000000000000000000..35ae03c89d0dc14271f64d69063462cefad1b772 --- /dev/null +++ 
b/gl2een/llama2-13b-instruct-full-fintune/result_2023-11-24 03:45:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37372013651877134, + "acc_stderr": 0.014137708601759091, + "acc_norm": 0.42235494880546076, + "acc_norm_stderr": 0.014434138713379991 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40360485958972314, + "acc_stderr": 0.004896173035943316, + "acc_norm": 0.5403306114319857, + "acc_norm_stderr": 0.004973522582431206 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.017862091778507855, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.017862091778507855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894265, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894265 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.03128217706368461, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.03128217706368461 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.02475600038213094, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.02475600038213094 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + 
"acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.028071588901091855, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.028071588901091855 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261114, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261114 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4421965317919075, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.4421965317919075, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.03814269893261837, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.03814269893261837 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.021406952688151577, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.021406952688151577 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848877, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848877 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3349673202614379, + "acc_stderr": 0.01909422816700032, + "acc_norm": 0.3349673202614379, + "acc_norm_stderr": 0.01909422816700032 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.0316746870682898, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.0316746870682898 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.0279715413701706, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.0279715413701706 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4472573839662447, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.4472573839662447, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2848761408083442, + "acc_stderr": 0.011527830846369012, + "acc_norm": 0.2848761408083442, + "acc_norm_stderr": 0.011527830846369012 + }, 
+ "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875833, + "mc2": 0.43105567225485036, + "mc2_stderr": 0.015011642206271197 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38961038961038963, + "acc_stderr": 0.0167661616718935, + "acc_norm": 0.45690672963400236, + "acc_norm_stderr": 0.017126389093086777 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gl2een/llama2-13b-instruct-full-fintune", + "model_sha": "2a6e2457ca85d5810b55f2e90b36637f2ed4e695", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gl2een/polyglot-ko-12.8b-instrcut-full-finetune2/result_2023-10-30 06:20:20.json b/gl2een/polyglot-ko-12.8b-instrcut-full-finetune2/result_2023-10-30 06:20:20.json new file mode 100644 index 0000000000000000000000000000000000000000..40e5cdacefe91d6195b658eb87ddb64d8cf112aa --- /dev/null +++ b/gl2een/polyglot-ko-12.8b-instrcut-full-finetune2/result_2023-10-30 06:20:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537364, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785562 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38926508663612824, + "acc_stderr": 0.004865871290143343, + "acc_norm": 0.5028878709420435, + "acc_norm_stderr": 0.004989698183207819 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20561941251596424, + "acc_stderr": 0.014452500456785823, + "acc_norm": 0.20561941251596424, + "acc_norm_stderr": 0.014452500456785823 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.16265060240963855, + "acc_stderr": 0.028730237892613787, + "acc_norm": 0.16265060240963855, + "acc_norm_stderr": 0.028730237892613787 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.025218040373410622, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.025218040373410622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.09417040358744394, + "acc_stderr": 0.019602162350340513, + "acc_norm": 0.09417040358744394, + "acc_norm_stderr": 0.019602162350340513 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117317, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117317 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.03446897738659333, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.03446897738659333 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 
0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3153846153846154, + "acc_stderr": 0.02355964698318995, + "acc_norm": 0.3153846153846154, + "acc_norm_stderr": 0.02355964698318995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22167487684729065, + "acc_stderr": 0.029225575892489614, + "acc_norm": 0.22167487684729065, + "acc_norm_stderr": 0.029225575892489614 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022895, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022895 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02723601394619666, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02723601394619666 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.02815283794249386, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.02815283794249386 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721376, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721376 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.2085889570552147, + "acc_stderr": 0.03192193448934726, + "acc_norm": 0.2085889570552147, + "acc_norm_stderr": 0.03192193448934726 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2191358024691358, + "acc_stderr": 0.023016705640262206, + "acc_norm": 0.2191358024691358, + "acc_norm_stderr": 0.023016705640262206 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3412844036697248, + "acc_stderr": 0.020328612816592435, + "acc_norm": 0.3412844036697248, + "acc_norm_stderr": 0.020328612816592435 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818737, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818737 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036843, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036843 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1652892561983471, + "acc_stderr": 0.033907806129727755, + "acc_norm": 0.1652892561983471, + "acc_norm_stderr": 0.033907806129727755 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.01728276069516742, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.01728276069516742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.02484792135806396, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.02484792135806396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.033213611069662696, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.033213611069662696 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + 
"acc_stderr": 0.030555316755573637, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.030555316755573637 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2109704641350211, + "acc_stderr": 0.02655837250266192, + "acc_norm": 0.2109704641350211, + "acc_norm_stderr": 0.02655837250266192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113892, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113892 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967409, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967409 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548298, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548298 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.01510240479735965, + "mc2": 0.39935660614901936, + "mc2_stderr": 0.014712360794626336 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33293978748524206, + "acc_stderr": 0.01620243120837379, + "acc_norm": 0.44037780401416765, + "acc_norm_stderr": 0.01706769977431299 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gl2een/polyglot-ko-12.8b-instrcut-full-finetune2", + "model_sha": "ff5e25810aa9d6ca4bc65f7504dac285df05e907", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/1222-42dot-1.3B-Ko-CoT-Collection-2e-5/result_2023-12-23 05:39:38.json b/heegyu/1222-42dot-1.3B-Ko-CoT-Collection-2e-5/result_2023-12-23 05:39:38.json new file mode 100644 index 0000000000000000000000000000000000000000..20648c831350d58b211ba0fb40e623776b4ff718 --- /dev/null +++ b/heegyu/1222-42dot-1.3B-Ko-CoT-Collection-2e-5/result_2023-12-23 05:39:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29266211604095566, + "acc_stderr": 0.013295916103619406, + "acc_norm": 0.34726962457337884, + "acc_norm_stderr": 0.013913034529620444 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3556064528978291, + "acc_stderr": 0.004777183508949815, + "acc_norm": 0.4439354710217088, + "acc_norm_stderr": 0.004958314114266502 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2554278416347382, + "acc_stderr": 0.015594955384455765, + "acc_norm": 0.2554278416347382, + "acc_norm_stderr": 0.015594955384455765 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628834, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628834 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632938, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632938 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728744, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728744 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.031353050095330855, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.031353050095330855 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774632, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774632 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.02894200404099817, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.02894200404099817 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.023060438380857737, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.023060438380857737 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854933, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854933 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885196, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885196 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.026616482980501715, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.026616482980501715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.02752859921034049, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.02752859921034049 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031708, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031708 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.25722543352601157, + "acc_stderr": 0.023532925431044283, + "acc_norm": 0.25722543352601157, + "acc_norm_stderr": 0.023532925431044283 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.025630824975621334, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.025630824975621334 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735703, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735703 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25504587155963304, + "acc_stderr": 0.01868850085653584, + "acc_norm": 0.25504587155963304, + "acc_norm_stderr": 0.01868850085653584 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790605, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790605 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.025553169991826517, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.025553169991826517 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.16, + "acc_stderr": 0.036845294917747094, + "acc_norm": 0.16, + "acc_norm_stderr": 0.036845294917747094 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998905, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347019, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347019 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653063, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653063 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220513, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220513 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.028666857790274648, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.028666857790274648 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3206751054852321, + "acc_stderr": 0.03038193194999041, + "acc_norm": 0.3206751054852321, + "acc_norm_stderr": 0.03038193194999041 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045502, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045502 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.01494881267906214, + "mc2": 0.39153887048235514, + "mc2_stderr": 0.014723691720257542 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2255017709563164, + "acc_stderr": 0.01436812214953219, + "acc_norm": 0.3435655253837072, + "acc_norm_stderr": 0.01632733480642914 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/1222-42dot-1.3B-Ko-CoT-Collection-2e-5", + "model_sha": "af8700ffcbcc7e3b1db1fb5688d42cbdc7201644", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/42dot-1.3B-KOR-OpenOrca-Platypus-1e-5/result_2023-11-28 14:58:23.json b/heegyu/42dot-1.3B-KOR-OpenOrca-Platypus-1e-5/result_2023-11-28 14:58:23.json new file mode 100644 index 0000000000000000000000000000000000000000..d169a3c1fc3efb6724254cd1d68feb289cef62a2 --- /dev/null +++ b/heegyu/42dot-1.3B-KOR-OpenOrca-Platypus-1e-5/result_2023-11-28 14:58:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28071672354948807, + "acc_stderr": 0.013131238126975583, + "acc_norm": 0.3361774744027304, + "acc_norm_stderr": 0.013804855026205758 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3580959968133838, + "acc_stderr": 0.004784607222774637, + "acc_norm": 0.45279824736108343, + "acc_norm_stderr": 0.004967497130451344 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209196, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209196 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2388250319284802, + "acc_stderr": 0.015246803197398698, + "acc_norm": 0.2388250319284802, + "acc_norm_stderr": 0.015246803197398698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614866, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614866 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.02880998985410297, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.02880998985410297 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.036293353299478595, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.036293353299478595 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632938, + 
"acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632938 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2242152466367713, + "acc_stderr": 0.027991534258519527, + "acc_norm": 0.2242152466367713, + "acc_norm_stderr": 0.027991534258519527 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.040393149787245626, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.040393149787245626 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.032894773300986155, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.032894773300986155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.03375672449560554, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.03375672449560554 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886838, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886838 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830513, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144445, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594525, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594525 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.02760192138141759, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.02760192138141759 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04265792110940589, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940589 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959316, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959316 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3034825870646766, + "acc_stderr": 0.032510068164586174, + 
"acc_norm": 0.3034825870646766, + "acc_norm_stderr": 0.032510068164586174 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.03414014007044036, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.03414014007044036 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.0358687928008034, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.0358687928008034 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.023176298203992002, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.023176298203992002 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2808641975308642, + "acc_stderr": 0.02500646975579921, + "acc_norm": 0.2808641975308642, + "acc_norm_stderr": 0.02500646975579921 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.034588160421810066, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.034588160421810066 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30458715596330277, + "acc_stderr": 0.019732299420354038, + "acc_norm": 0.30458715596330277, + "acc_norm_stderr": 0.019732299420354038 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02564686309713792, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02564686309713792 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.034260594244031654, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.034260594244031654 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.01798661530403031, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.01798661530403031 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + 
"acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.02866685779027465, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.02866685779027465 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293423, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293423 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25358539765319427, + "acc_stderr": 0.011111715336101138, + "acc_norm": 0.25358539765319427, + "acc_norm_stderr": 0.011111715336101138 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.028867431449849313, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.028867431449849313 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826824, + "mc2": 0.406729238769047, + "mc2_stderr": 0.01490927480363471 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2408500590318772, + "acc_stderr": 0.01470117266258392, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.01653869160332771 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/42dot-1.3B-KOR-OpenOrca-Platypus-1e-5", + "model_sha": "56a4dde8ef71b89abad939d88ef1f23d12442ae6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/42dot_LLM-PLM-1.3B-mt-steps-50000/result_2023-10-29 14:09:52.json b/heegyu/42dot_LLM-PLM-1.3B-mt-steps-50000/result_2023-10-29 14:09:52.json new file mode 100644 index 0000000000000000000000000000000000000000..f2ea3a922909dfb9ec4f2a7d0da6c6143112ff86 --- /dev/null +++ b/heegyu/42dot_LLM-PLM-1.3B-mt-steps-50000/result_2023-10-29 14:09:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2636518771331058, + "acc_stderr": 0.012875929151297061, + "acc_norm": 0.3216723549488055, + "acc_norm_stderr": 0.013650488084494164 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3345947022505477, + "acc_stderr": 0.004708842600177437, + "acc_norm": 0.41884086835291773, + "acc_norm_stderr": 0.004923609207861533 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.031267817146631786, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.031267817146631786 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.14563106796116504, + "acc_stderr": 0.03492606476623792, + "acc_norm": 0.14563106796116504, + "acc_norm_stderr": 0.03492606476623792 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27330779054916987, + "acc_stderr": 0.015936681062628556, + "acc_norm": 0.27330779054916987, + "acc_norm_stderr": 0.015936681062628556 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + 
"acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.0261488180184245, + "acc_norm": 0.2, + "acc_norm_stderr": 0.0261488180184245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.033844291552331346, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.033844291552331346 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.02960510321703834, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.02960510321703834 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.037932811853078084, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.037932811853078084 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.021444547301560486, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.021444547301560486 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.03893542518824847, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.03893542518824847 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21182266009852216, + "acc_stderr": 0.02874898368994107, + "acc_norm": 0.21182266009852216, + "acc_norm_stderr": 0.02874898368994107 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.02489246917246283, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.02489246917246283 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.1794871794871795, + "acc_stderr": 0.02514093595033545, + "acc_norm": 0.1794871794871795, + "acc_norm_stderr": 0.02514093595033545 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.02461829819586651, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02461829819586651 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 
0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918424, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25, + "acc_stderr": 0.02409347123262133, + "acc_norm": 0.25, + "acc_norm_stderr": 0.02409347123262133 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.031618779179354094, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.031618779179354094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21834862385321102, + "acc_stderr": 0.017712600528722738, + "acc_norm": 0.21834862385321102, + "acc_norm_stderr": 0.017712600528722738 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.033954900208561116, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.033954900208561116 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.024630048979824775, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.024630048979824775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2434640522875817, + "acc_stderr": 0.017362473762146616, + "acc_norm": 0.2434640522875817, + "acc_norm_stderr": 0.017362473762146616 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.02612957252718085, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.02612957252718085 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010083, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010083 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682487, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682487 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.028263889943784593, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.028263889943784593 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25945241199478486, + "acc_stderr": 0.0111952620763503, + "acc_norm": 0.25945241199478486, + "acc_norm_stderr": 0.0111952620763503 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.03096451792692341, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.03096451792692341 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.031922715695483, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.031922715695483 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.40710844276153646, + "mc2_stderr": 0.014826228669308838 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2526564344746163, + "acc_stderr": 0.014939640598798442, + "acc_norm": 0.3116883116883117, + "acc_norm_stderr": 0.015924567607358345 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/42dot_LLM-PLM-1.3B-mt-steps-50000", + "model_sha": "96228db523495871e1b856e0f29d82eb0efd9d2d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/42dot_LLM-PLM-1.3B-mt/result_2023-10-15 11:19:51.json b/heegyu/42dot_LLM-PLM-1.3B-mt/result_2023-10-15 11:19:51.json new file mode 100644 index 0000000000000000000000000000000000000000..cc10300a6b5890b0ace635ecc2df4620ad526850 --- /dev/null +++ b/heegyu/42dot_LLM-PLM-1.3B-mt/result_2023-10-15 11:19:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2175767918088737, + "acc_stderr": 0.012057262020972506, + "acc_norm": 0.2627986348122867, + "acc_norm_stderr": 0.012862523175351331 + }, + "harness|ko_hellaswag|10": { + "acc": 0.31447918741286596, + "acc_stderr": 0.004633592029065801, + "acc_norm": 0.37890858394742083, + "acc_norm_stderr": 0.004841238763529378 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.045416094465039476 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.22349936143039592, + "acc_stderr": 0.01489723522945071, + "acc_norm": 0.22349936143039592, + "acc_norm_stderr": 0.01489723522945071 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.02895734278834235, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.02895734278834235 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511116, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511116 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19282511210762332, + "acc_stderr": 0.026478240960489365, + "acc_norm": 0.19282511210762332, + "acc_norm_stderr": 0.026478240960489365 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.03427308652999934, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.03427308652999934 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.0302839955258844, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.0302839955258844 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243838, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243838 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.02582210611941589, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.02582210611941589 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + 
"acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.02528839450289137, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.02528839450289137 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.19402985074626866, + "acc_stderr": 0.02796267760476893, + "acc_norm": 0.19402985074626866, + "acc_norm_stderr": 0.02796267760476893 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1791907514450867, + "acc_stderr": 0.02924251305906327, + "acc_norm": 0.1791907514450867, + "acc_norm_stderr": 0.02924251305906327 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.023468429832451163, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.023468429832451163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27461139896373055, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.27461139896373055, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3522935779816514, + "acc_stderr": 0.020480568843999, + "acc_norm": 0.3522935779816514, + "acc_norm_stderr": 0.020480568843999 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 
0.024848018263875195, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.024848018263875195 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.23140495867768596, + "acc_stderr": 0.03849856098794087, + "acc_norm": 0.23140495867768596, + "acc_norm_stderr": 0.03849856098794087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.037150621549989035, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.037150621549989035 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.017242385828779606, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.017242385828779606 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872405, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.03018753206032938, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.03018753206032938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.026537045312145315, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.026537045312145315 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22784810126582278, + "acc_stderr": 0.02730348459906942, + "acc_norm": 0.22784810126582278, + "acc_norm_stderr": 0.02730348459906942 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.01099615663514269, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.01099615663514269 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.0291022543896741, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.0291022543896741 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.0151274270965207, + "mc2": 0.3837063373774927, + "mc2_stderr": 0.01511245687075564 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2668240850059032, + "acc_stderr": 0.015206575684565885, + "acc_norm": 0.30342384887839435, + "acc_norm_stderr": 0.01580607271790957 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/42dot_LLM-PLM-1.3B-mt", + "model_sha": "b6ad84ea5edcb8b397824634213e7008f08fbd06", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/AULM-5.8b-v0804-hf/result_2023-10-15 11:21:40.json b/heegyu/AULM-5.8b-v0804-hf/result_2023-10-15 11:21:40.json new file mode 100644 index 0000000000000000000000000000000000000000..5888b60da9bec97b2da0f5396b91dab07433d128 --- /dev/null +++ b/heegyu/AULM-5.8b-v0804-hf/result_2023-10-15 11:21:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2619453924914676, + "acc_stderr": 0.012849054826858114, + "acc_norm": 0.3302047781569966, + "acc_norm_stderr": 0.013743085603760427 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3604859589723163, + 
"acc_stderr": 0.004791601975612766, + "acc_norm": 0.45429197371041624, + "acc_norm_stderr": 0.004968888130290065 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.1871345029239766, + "acc_stderr": 0.029913127232368025, + "acc_norm": 0.1871345029239766, + "acc_norm_stderr": 0.029913127232368025 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21966794380587484, + "acc_stderr": 0.014805384478371163, + "acc_norm": 0.21966794380587484, + "acc_norm_stderr": 0.014805384478371163 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039783, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039783 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.02575586592263294, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.02575586592263294 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11659192825112108, + "acc_stderr": 0.02153963981624447, + "acc_norm": 0.11659192825112108, + "acc_norm_stderr": 0.02153963981624447 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.032586303838365555, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.032586303838365555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.030176808288974337, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.030176808288974337 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.32564102564102565, + "acc_stderr": 0.02375966576741229, + "acc_norm": 0.32564102564102565, + "acc_norm_stderr": 0.02375966576741229 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + 
"acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335134, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335134 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.02749566368372406, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.02749566368372406 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.02549753263960954, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.02549753263960954 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.034355680560478746, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.034355680560478746 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184756, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184756 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508297, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508297 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.03351953879521272, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.03351953879521272 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.19753086419753085, + "acc_stderr": 0.02215288992789894, + "acc_norm": 0.19753086419753085, + "acc_norm_stderr": 0.02215288992789894 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32124352331606215, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.32124352331606215, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + 
}, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3669724770642202, + "acc_stderr": 0.02066467565952053, + "acc_norm": 0.3669724770642202, + "acc_norm_stderr": 0.02066467565952053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.026173908506718576, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.026173908506718576 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1322314049586777, + "acc_stderr": 0.030922788320445812, + "acc_norm": 0.1322314049586777, + "acc_norm_stderr": 0.030922788320445812 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.21631205673758866, + "acc_stderr": 0.024561720560562793, + "acc_norm": 0.21631205673758866, + "acc_norm_stderr": 0.024561720560562793 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22362869198312235, + "acc_stderr": 0.027123298205229972, + "acc_norm": 0.22362869198312235, + "acc_norm_stderr": 0.027123298205229972 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24967405475880053, + "acc_stderr": 0.011054538377832327, + "acc_norm": 0.24967405475880053, + "acc_norm_stderr": 0.011054538377832327 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + 
"acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015016, + "mc2": 0.40797537743571977, + "mc2_stderr": 0.014976707161150397 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2975206611570248, + "acc_stderr": 0.01571774220508993, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.01663791778979874 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/AULM-5.8b-v0804-hf", + "model_sha": "ddcfd46cc8b42d7fb6ad822d97b6c30dfd3c028b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/LIMA-13b-hf/result_2023-10-15 11:18:07.json b/heegyu/LIMA-13b-hf/result_2023-10-15 11:18:07.json new file mode 100644 
index 0000000000000000000000000000000000000000..dc8c89a261dfe210566fc34b7af9f6002f627b50 --- /dev/null +++ b/heegyu/LIMA-13b-hf/result_2023-10-15 11:18:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21331058020477817, + "acc_stderr": 0.011970971742326334, + "acc_norm": 0.26023890784982934, + "acc_norm_stderr": 0.012821930225112552 + }, + "harness|ko_hellaswag|10": { + "acc": 0.30302728540131446, + "acc_stderr": 0.004586276903267076, + "acc_norm": 0.3558056164110735, + "acc_norm_stderr": 0.00477778258481779 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.36257309941520466, + "acc_stderr": 0.036871306155620606, + "acc_norm": 0.36257309941520466, + "acc_norm_stderr": 0.036871306155620606 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.30268199233716475, + "acc_stderr": 0.01642878158174936, + "acc_norm": 0.30268199233716475, + "acc_norm_stderr": 0.01642878158174936 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03820169914517905, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03820169914517905 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.03013590647851756, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 0.03013590647851756 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 0.026236965881153266, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.026236965881153266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.03242497958178815, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178815 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03960933549451209, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03960933549451209 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376536, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376536 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3277310924369748, + "acc_stderr": 0.03048991141767323, + "acc_norm": 0.3277310924369748, + "acc_norm_stderr": 0.03048991141767323 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3487179487179487, + "acc_stderr": 0.02416278028401772, + "acc_norm": 0.3487179487179487, + "acc_norm_stderr": 0.02416278028401772 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.452991452991453, + "acc_stderr": 0.032610998730986204, + "acc_norm": 0.452991452991453, + "acc_norm_stderr": 0.032610998730986204 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.02761116340239972, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.02761116340239972 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.025040443877000693, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.025040443877000693 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360385, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360385 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.34328358208955223, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.34328358208955223, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267437, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267437 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0251901813276084, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0251901813276084 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409814, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409814 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 
0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.037124548537213684, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.037124548537213684 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30825688073394497, + "acc_stderr": 0.01979836669836725, + "acc_norm": 0.30825688073394497, + "acc_norm_stderr": 0.01979836669836725 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.035834961763610625, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.035834961763610625 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.018311653053648222, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.018311653053648222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537773, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537773 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915206, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915206 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319463, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319463 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02576725201085595, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02576725201085595 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789848, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789848 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.21940928270042195, + "acc_stderr": 0.026939106581553945, + "acc_norm": 0.21940928270042195, + "acc_norm_stderr": 0.026939106581553945 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26792698826597133, 
+ "acc_stderr": 0.011311347690633886, + "acc_norm": 0.26792698826597133, + "acc_norm_stderr": 0.011311347690633886 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501943, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501943 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875835, + "mc2": 0.43296733660801473, + "mc2_stderr": 0.015927191551239974 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2255017709563164, + "acc_stderr": 0.01436812214953218, + "acc_norm": 0.30460448642266824, + "acc_norm_stderr": 0.01582336727312938 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/LIMA-13b-hf", + "model_sha": "98faa74a9b41cbd9033904cd58420705936849eb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/LIMA2-7b-hf/result_2023-10-15 11:18:57.json b/heegyu/LIMA2-7b-hf/result_2023-10-15 11:18:57.json new file mode 100644 index 0000000000000000000000000000000000000000..30cc2bf12bc48e9112636b08b047003c600b77ce --- /dev/null +++ b/heegyu/LIMA2-7b-hf/result_2023-10-15 11:18:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26706484641638223, + "acc_stderr": 0.01292893319649633, + "acc_norm": 0.3046075085324232, + "acc_norm_stderr": 0.013449522109932492 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3309101772555268, + "acc_stderr": 0.004695791340502858, + "acc_norm": 0.4010157339175463, + "acc_norm_stderr": 0.0048910255336330226 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2937420178799489, + "acc_stderr": 0.016287759388491675, + "acc_norm": 0.2937420178799489, + "acc_norm_stderr": 0.016287759388491675 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2297872340425532, + "acc_stderr": 0.027501752944412424, + "acc_norm": 0.2297872340425532, + "acc_norm_stderr": 0.027501752944412424 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.03329394119073532, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.03329394119073532 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998482, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.02558306248998482 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.040393149787245605, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.040393149787245605 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35858585858585856, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.35858585858585856, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 
0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.028942004040998167, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.028942004040998167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3153846153846154, + "acc_stderr": 0.023559646983189957, + "acc_norm": 0.3153846153846154, + "acc_norm_stderr": 0.023559646983189957 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.32903225806451614, + "acc_stderr": 0.02672949906834996, + "acc_norm": 0.32903225806451614, + "acc_norm_stderr": 0.02672949906834996 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493857, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493857 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.36318407960199006, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.36318407960199006, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.033450369167889904, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.033450369167889904 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.02320139293819498, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.02320139293819498 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388677003, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388677003 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33641975308641975, + "acc_stderr": 0.02628973494595293, + "acc_norm": 0.33641975308641975, + "acc_norm_stderr": 0.02628973494595293 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32124352331606215, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.32124352331606215, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30091743119266057, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.30091743119266057, + "acc_norm_stderr": 0.019664751366802114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.027057974624494382, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.027057974624494382 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.041032038305145124, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.041032038305145124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.017952449196987866, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.017952449196987866 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.01444415780826145, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.01444415780826145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932267, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932267 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16911764705882354, + "acc_stderr": 0.02277086801011303, + "acc_norm": 0.16911764705882354, + "acc_norm_stderr": 0.02277086801011303 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.03011642629654059, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.03011642629654059 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26988265971316816, + "acc_stderr": 0.011337381084250404, + "acc_norm": 0.26988265971316816, + "acc_norm_stderr": 0.011337381084250404 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.0313217980308329, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.0313217980308329 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.45638880812290744, + "mc2_stderr": 0.01588078280533526 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24557260920897284, + "acc_stderr": 0.014798357154972823, + "acc_norm": 0.2987012987012987, + "acc_norm_stderr": 0.015735657391438285 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/LIMA2-7b-hf", + "model_sha": "6a1aa59cb7624f059728840ce68b20b1070ebdcb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/Mistral-7B-v0.1-OKI-v20231124-1e-5/result_2023-11-27 11:21:57.json b/heegyu/Mistral-7B-v0.1-OKI-v20231124-1e-5/result_2023-11-27 11:21:57.json new file mode 100644 index 0000000000000000000000000000000000000000..7a1de2160ac2cb36c3c5404134d6d653e36a12ad --- /dev/null +++ b/heegyu/Mistral-7B-v0.1-OKI-v20231124-1e-5/result_2023-11-27 11:21:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.013928933461382501, + "acc_norm": 0.39419795221843, + "acc_norm_stderr": 0.01428052266746732 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37582154949213303, + "acc_stderr": 0.004833444556338624, + "acc_norm": 0.4905397331208923, + "acc_norm_stderr": 0.004988888194063274 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107675, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107675 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.017869330154003705, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.017869330154003705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079021, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079021 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { 
+ "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.02527589207024063, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.02527589207024063 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785742, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785742 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.02891120880274947, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.02891120880274947 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961827, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342654, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342654 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 
0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348913, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.039158572914369714, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.039158572914369714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949098, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949098 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762633, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762633 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010212, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010212 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963753, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.015131608849963753 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 
0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897634, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897634 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842883, + "mc2": 0.45246749534262715, + "mc2_stderr": 0.015187331640958925 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49940968122786306, + "acc_stderr": 0.01719034212344866, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.0171427361176433 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/Mistral-7B-v0.1-OKI-v20231124-1e-5", + "model_sha": "34992c8cda5bbebf1cfbf5d0d0ecc71dcbaa77e0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/WizardVicuna-3B-0719/result_2023-10-15 11:20:31.json b/heegyu/WizardVicuna-3B-0719/result_2023-10-15 11:20:31.json new file mode 100644 index 0000000000000000000000000000000000000000..7853cf5b44b55ad1a96f4d98c9a42d9ec3ef8462 --- /dev/null +++ b/heegyu/WizardVicuna-3B-0719/result_2023-10-15 11:20:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19112627986348124, + "acc_stderr": 0.011490055292778599, + "acc_norm": 0.22781569965870307, + "acc_norm_stderr": 0.012256708602326907 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2822146982672774, + "acc_stderr": 0.004491574539441884, + "acc_norm": 0.30770762796255724, + "acc_norm_stderr": 0.004606015773125627 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24393358876117496, + "acc_stderr": 0.015357212665829479, + "acc_norm": 0.24393358876117496, + "acc_norm_stderr": 0.015357212665829479 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.030135906478517563, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 0.030135906478517563 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621963, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621963 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.32286995515695066, + "acc_stderr": 0.031381476375754995, + "acc_norm": 
0.32286995515695066, + "acc_norm_stderr": 0.031381476375754995 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1919191919191919, + "acc_stderr": 0.028057791672989017, + "acc_norm": 0.1919191919191919, + "acc_norm_stderr": 0.028057791672989017 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774633, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774633 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.027205371538279472, + "acc_norm": 0.226890756302521, + "acc_norm_stderr": 0.027205371538279472 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371383, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371383 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678243, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678243 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22903225806451613, + "acc_stderr": 0.023904914311782658, + "acc_norm": 0.22903225806451613, + "acc_norm_stderr": 0.023904914311782658 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106737, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106737 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473835, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473835 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.02970528405677243, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.02970528405677243 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 
0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.020842290930114665, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.020842290930114665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587403, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.033519538795212696, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.033519538795212696 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.16580310880829016, + "acc_stderr": 0.026839845022314415, + "acc_norm": 0.16580310880829016, + "acc_norm_stderr": 0.026839845022314415 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.2036697247706422, + "acc_stderr": 0.017266742087630783, + "acc_norm": 0.2036697247706422, + "acc_norm_stderr": 0.017266742087630783 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.041733491480835, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.041733491480835 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.03391160934343604, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.03391160934343604 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.01755581809132227, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.01755581809132227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953775, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 
0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1712962962962963, + "acc_stderr": 0.025695341643824688, + "acc_norm": 0.1712962962962963, + "acc_norm_stderr": 0.025695341643824688 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.01410222362315258, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.01410222362315258 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.026882144922307744, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.026882144922307744 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.02845882099146029, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.02845882099146029 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2301173402868318, + "acc_stderr": 0.010750183177375559, + "acc_norm": 0.2301173402868318, + "acc_norm_stderr": 0.010750183177375559 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.0315841532404771, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.0315841532404771 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394805, + "mc2": 0.4653887573676535, + "mc2_stderr": 0.01614389294463642 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22077922077922077, + "acc_stderr": 0.014260152803540035, + "acc_norm": 0.27863046044864226, + "acc_norm_stderr": 0.015413739494345689 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/WizardVicuna-3B-0719", + "model_sha": "66621ebc9e2fa15e4fe229dfbea725c916cb7c5e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/WizardVicuna-open-llama-3b-v2/result_2023-10-14 16:00:50.json b/heegyu/WizardVicuna-open-llama-3b-v2/result_2023-10-14 16:00:50.json new file mode 100644 index 0000000000000000000000000000000000000000..8ba997695f5af29a75b5478934ed1dcd72594916 --- /dev/null +++ b/heegyu/WizardVicuna-open-llama-3b-v2/result_2023-10-14 16:00:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20563139931740615, + "acc_stderr": 0.011810745260742581, + "acc_norm": 0.257679180887372, + "acc_norm_stderr": 0.012780770562768414 + }, + "harness|ko_hellaswag|10": { + "acc": 0.27693686516630156, + "acc_stderr": 0.004465704810893538, + "acc_norm": 0.30611431985660226, + "acc_norm_stderr": 0.004599358920909526 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.03645981377388807, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.03645981377388807 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24904214559386972, + "acc_stderr": 0.015464676163395983, + "acc_norm": 0.24904214559386972, + "acc_norm_stderr": 0.015464676163395983 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.02895734278834235, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 
0.02895734278834235 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.20257234726688103, + "acc_stderr": 0.022827317491059682, + "acc_norm": 0.20257234726688103, + "acc_norm_stderr": 0.022827317491059682 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.030216831011508762, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.030216831011508762 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596918, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596918 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.029126522834586825, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.029126522834586825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.03695183311650232, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.03695183311650232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863773, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863773 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2230769230769231, + "acc_stderr": 0.021107730127243998, + "acc_norm": 0.2230769230769231, + "acc_norm_stderr": 0.021107730127243998 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22258064516129034, + "acc_stderr": 0.02366421667164251, + "acc_norm": 0.22258064516129034, + "acc_norm_stderr": 0.02366421667164251 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674054, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674054 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.025907897122408173, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.025907897122408173 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 
0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.030965903123573037, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577622, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.034089978868575295, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.034089978868575295 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2808641975308642, + "acc_stderr": 0.025006469755799215, + "acc_norm": 0.2808641975308642, + "acc_norm_stderr": 0.025006469755799215 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.02951928261681725, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.02951928261681725 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.2036697247706422, + "acc_stderr": 0.01726674208763079, + "acc_norm": 0.2036697247706422, + "acc_norm_stderr": 0.01726674208763079 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102148, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102148 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053435, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.018217269552053435 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510934, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510934 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976264, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976264 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.025336848563332372, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.025336848563332372 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2301173402868318, + "acc_stderr": 0.01075018317737556, + "acc_norm": 0.2301173402868318, + "acc_norm_stderr": 0.01075018317737556 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.029554292605695063, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.029554292605695063 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396718, + "mc2": 0.46188658792557263, + "mc2_stderr": 0.016386200757722597 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21487603305785125, + "acc_stderr": 0.01412140552290331, + "acc_norm": 0.28807556080283353, + "acc_norm_stderr": 0.015569869674838374 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/WizardVicuna-open-llama-3b-v2", + "model_sha": "0946550dfbf40d926d6ba816d0ca13e9c810fa72", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/WizardVicuna2-13b-hf/result_2023-10-15 11:17:29.json b/heegyu/WizardVicuna2-13b-hf/result_2023-10-15 11:17:29.json new file mode 100644 index 0000000000000000000000000000000000000000..9b9449f46e8a157627f2cdfc02ec62742d956259 --- /dev/null +++ b/heegyu/WizardVicuna2-13b-hf/result_2023-10-15 11:17:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2551194539249147, + "acc_stderr": 0.012739038695202105, + "acc_norm": 0.3191126279863481, + "acc_norm_stderr": 0.013621696119173306 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32812188807010556, + "acc_stderr": 0.0046856987521048075, + "acc_norm": 0.39225253933479387, + "acc_norm_stderr": 0.004872546302641858 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.017268607560005773, + "acc_norm": 0.37037037037037035, + 
"acc_norm_stderr": 0.017268607560005773 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610334, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610334 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740749, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740749 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3408360128617363, + "acc_stderr": 0.026920841260776165, + "acc_norm": 0.3408360128617363, + "acc_norm_stderr": 0.026920841260776165 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35858585858585856, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.35858585858585856, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3067226890756303, + "acc_stderr": 0.029953823891887048, + "acc_norm": 0.3067226890756303, + "acc_norm_stderr": 0.029953823891887048 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.023119362758232294, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.023119362758232294 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.027273890594300642, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.027273890594300642 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.49145299145299143, + "acc_stderr": 0.032751303000970296, + "acc_norm": 0.49145299145299143, + "acc_norm_stderr": 0.032751303000970296 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3471698113207547, + "acc_stderr": 0.029300101705549655, + "acc_norm": 0.3471698113207547, + "acc_norm_stderr": 0.029300101705549655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.047093069786618966, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.047093069786618966 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.40298507462686567, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.40298507462686567, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047873, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047873 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240016, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.025624723994030457, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.025624723994030457 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.02700252103451647, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.02700252103451647 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.034107802518361846, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.034107802518361846 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3155963302752294, + "acc_stderr": 0.019926117513869662, + "acc_norm": 0.3155963302752294, + "acc_norm_stderr": 0.019926117513869662 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046637, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046637 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.036906779861372814, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.036906779861372814 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.018550634502952964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.018550634502952964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150379, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150379 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.02813968944485967, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.02813968944485967 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.24632352941176472, + "acc_stderr": 0.02617343857052, + "acc_norm": 0.24632352941176472, + "acc_norm_stderr": 0.02617343857052 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3020408163265306, + "acc_stderr": 0.02939360931987982, + "acc_norm": 0.3020408163265306, + "acc_norm_stderr": 0.02939360931987982 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2646675358539765, + "acc_stderr": 0.011267332992845535, + "acc_norm": 0.2646675358539765, + "acc_norm_stderr": 0.011267332992845535 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.03149328104507956, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.03149328104507956 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006509, + "mc2": 0.44330415731488865, + "mc2_stderr": 0.015557823529945149 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2585596221959858, + "acc_stderr": 0.015053354438963988, + "acc_norm": 0.3482880755608028, + "acc_norm_stderr": 0.01637992673914804 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/WizardVicuna2-13b-hf", + "model_sha": "6cfd95e2dcdb6996afa9eb5c63273a1a3524c6c6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/ajoublue-gpt2-base/result_2023-10-29 14:08:36.json b/heegyu/ajoublue-gpt2-base/result_2023-10-29 14:08:36.json new file mode 100644 index 0000000000000000000000000000000000000000..592d2abac725f8f8f6e35b8116937320b75d3e60 --- /dev/null +++ b/heegyu/ajoublue-gpt2-base/result_2023-10-29 14:08:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1962457337883959, + "acc_stderr": 0.01160601988141628, + "acc_norm": 0.25, + "acc_norm_stderr": 0.012653835621466646 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2848038239394543, + "acc_stderr": 0.004503985839041979, + "acc_norm": 0.31876120294762, + "acc_norm_stderr": 0.004650438781745302 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24521072796934865, + "acc_stderr": 0.015384352284543936, + "acc_norm": 0.24521072796934865, + "acc_norm_stderr": 0.015384352284543936 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292326, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292326 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680588, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680588 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.22186495176848875, + "acc_stderr": 0.02359885829286305, + "acc_norm": 0.22186495176848875, + "acc_norm_stderr": 0.02359885829286305 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229136, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229136 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.032894773300986155, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.032894773300986155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.21182266009852216, + "acc_stderr": 0.02874898368994106, + "acc_norm": 0.21182266009852216, + "acc_norm_stderr": 0.02874898368994106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.20398009950248755, + "acc_stderr": 0.02849317624532607, + "acc_norm": 0.20398009950248755, + "acc_norm_stderr": 0.02849317624532607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.022779719088733396, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.022779719088733396 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.28440366972477066, + "acc_stderr": 0.0193420365877026, + "acc_norm": 0.28440366972477066, + "acc_norm_stderr": 0.0193420365877026 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242564, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242564 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.02564555362226673, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.02564555362226673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.03562367850095391, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.03562367850095391 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.01098630787004552, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.01098630787004552 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + 
"mc1_stderr": 0.015392118805015008, + "mc2": 0.4679072270766072, + "mc2_stderr": 0.01544600714675648 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29043683589138136, + "acc_stderr": 0.01560760256981463, + "acc_norm": 0.4037780401416765, + "acc_norm_stderr": 0.016869031540298625 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/ajoublue-gpt2-base", + "model_sha": "528c5d0f568ed796e0d87064d72a1baf961a5485", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/ajoublue-gpt2-medium/result_2023-10-29 14:08:46.json b/heegyu/ajoublue-gpt2-medium/result_2023-10-29 14:08:46.json new file mode 100644 index 0000000000000000000000000000000000000000..7c6c752ec988437594a22a0d062a604c87ce3bcd --- 
/dev/null +++ b/heegyu/ajoublue-gpt2-medium/result_2023-10-29 14:08:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1885665529010239, + "acc_stderr": 0.011430897647675815, + "acc_norm": 0.25426621160409557, + "acc_norm_stderr": 0.012724999945157746 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2983469428400717, + "acc_stderr": 0.004565974937793719, + "acc_norm": 0.33808006373232424, + "acc_norm_stderr": 0.004720891597174735 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.031267817146631786, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.031267817146631786 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.015745497169049053, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.015745497169049053 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.030363582197238174, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.030363582197238174 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23737373737373738, + "acc_stderr": 0.03031371053819892, + "acc_norm": 0.23737373737373738, + "acc_norm_stderr": 0.03031371053819892 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.031041941304059285, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.031041941304059285 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052452, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052452 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + 
"acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.043300437496507437, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.043300437496507437 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764815, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764815 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708094, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708094 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1907514450867052, + "acc_stderr": 0.029957851329869327, + "acc_norm": 0.1907514450867052, + "acc_norm_stderr": 0.029957851329869327 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.02361867831006937, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.02361867831006937 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2147239263803681, + "acc_stderr": 0.03226219377286775, + "acc_norm": 0.2147239263803681, + "acc_norm_stderr": 0.03226219377286775 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23765432098765432, + "acc_stderr": 0.02368359183700856, + "acc_norm": 0.23765432098765432, + "acc_norm_stderr": 0.02368359183700856 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + 
}, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.0325771407770966, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.0325771407770966 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24403669724770644, + "acc_stderr": 0.018415286351416402, + "acc_norm": 0.24403669724770644, + "acc_norm_stderr": 0.018415286351416402 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.03512207412302054, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.03512207412302054 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.023929155517351287, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.023929155517351287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884125, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884125 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.017322789207784326, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.017322789207784326 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.02577001564429038, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.02577001564429038 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113895, + "acc_norm": 0.24445893089960888, + 
"acc_norm_stderr": 0.010976425013113895 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299956, + "mc2": 0.4441202464488538, + "mc2_stderr": 0.015239234605842715 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31759149940968123, + "acc_stderr": 0.01600558187622931, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.017014038119297463 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": 
"heegyu/ajoublue-gpt2-medium", + "model_sha": "97f502306274301f8406956b485f868a8f416e85", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/koalpaca-355m/result_2023-10-15 11:22:22.json b/heegyu/koalpaca-355m/result_2023-10-15 11:22:22.json new file mode 100644 index 0000000000000000000000000000000000000000..2cbcba41d248bdc5fb796b410360020e8aa90e0e --- /dev/null +++ b/heegyu/koalpaca-355m/result_2023-10-15 11:22:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21075085324232082, + "acc_stderr": 0.011918271754852189, + "acc_norm": 0.2687713310580205, + "acc_norm_stderr": 0.01295506596371068 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3009360685122486, + "acc_stderr": 0.004577275844432453, + "acc_norm": 0.3458474407488548, + "acc_norm_stderr": 0.004746716805735747 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03188578017686398, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03188578017686398 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27330779054916987, + "acc_stderr": 0.015936681062628556, + "acc_norm": 0.27330779054916987, + "acc_norm_stderr": 0.015936681062628556 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.03547854198560826, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.03547854198560826 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135778, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135778 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233137, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233137 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.025025538500532338, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.025025538500532338 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.03375672449560554, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.03375672449560554 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 
0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.031041941304059288, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.031041941304059288 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.023060438380857726, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.023060438380857726 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.02948036054954119, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.02948036054954119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.0264803571798957, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.0264803571798957 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.029705284056772436, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772436 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2275132275132275, + "acc_stderr": 0.021591269407823764, + "acc_norm": 0.2275132275132275, + "acc_norm_stderr": 0.021591269407823764 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.034089978868575295, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.034089978868575295 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.0242885336377261, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.0242885336377261 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.18652849740932642, + "acc_stderr": 0.02811209121011746, + "acc_norm": 0.18652849740932642, + "acc_norm_stderr": 0.02811209121011746 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21467889908256882, + "acc_stderr": 0.017604304149256483, + "acc_norm": 0.21467889908256882, + "acc_norm_stderr": 0.017604304149256483 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.02617390850671858, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.02617390850671858 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.16447368421052633, + "acc_stderr": 0.030167533468632726, + "acc_norm": 0.16447368421052633, + "acc_norm_stderr": 0.030167533468632726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.017401816711427653, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.017401816711427653 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642973, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642973 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697625, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697625 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.033981108902946366, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.033981108902946366 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.1836734693877551, + "acc_stderr": 0.024789071332007633, + "acc_norm": 0.1836734693877551, + "acc_norm_stderr": 0.024789071332007633 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.01094657096634878, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.01094657096634878 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871098, + "mc2": 0.428122521678851, + "mc2_stderr": 0.015366900048399064 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.358913813459268, + "acc_stderr": 0.01649180210299904, + "acc_norm": 0.43565525383707204, + "acc_norm_stderr": 0.01704741522947634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/koalpaca-355m", + "model_sha": "a1f4b5022e95bd808e2375dd3ed4c9bfbb64df32", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/kodialogpt-v1/result_2023-10-15 11:16:23.json b/heegyu/kodialogpt-v1/result_2023-10-15 11:16:23.json new file mode 100644 index 0000000000000000000000000000000000000000..b197f44b866788bf853e0759e01b848bd1fa9f09 --- /dev/null +++ b/heegyu/kodialogpt-v1/result_2023-10-15 11:16:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19283276450511946, + "acc_stderr": 0.011529055465663324, + "acc_norm": 0.23122866894197952, + "acc_norm_stderr": 0.012320858834772274 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25652260505875324, + "acc_stderr": 0.004358210689442262, + "acc_norm": 0.2560246962756423, + "acc_norm_stderr": 0.004355436696716298 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822582, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822582 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2835249042145594, + "acc_stderr": 0.016117318166832283, + "acc_norm": 0.2835249042145594, + "acc_norm_stderr": 0.016117318166832283 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.24663677130044842, + "acc_stderr": 0.028930413120910877, + "acc_norm": 0.24663677130044842, + "acc_norm_stderr": 0.028930413120910877 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.20610687022900764, + "acc_stderr": 0.03547771004159462, + "acc_norm": 0.20610687022900764, + "acc_norm_stderr": 0.03547771004159462 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.23232323232323232, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.02684151432295893, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.02684151432295893 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.028120966503914404, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.028120966503914404 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891363, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891363 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014666, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014666 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.034355680560478746, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.034355680560478746 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 
0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680814, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526503, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.02402774515526503 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868038, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.0181256691808615, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.0181256691808615 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276865, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276865 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.025457756696667878, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.025457756696667878 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.033176727875331574, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.033176727875331574 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23366013071895425, + "acc_stderr": 0.017119158496044506, + "acc_norm": 0.23366013071895425, + "acc_norm_stderr": 0.017119158496044506 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + 
"acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.02423101337054111, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.02423101337054111 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598018, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598018 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539265, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539265 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.028379449451588667, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.028379449451588667 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875833, + "mc2": 0.5203988868301895, + "mc2_stderr": 0.016282877106771964 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.16646989374262103, + "acc_stderr": 0.01280687925641312, + "acc_norm": 0.36835891381345925, + "acc_norm_stderr": 0.01658385898263907 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/kodialogpt-v1", + "model_sha": "f8b2ddbf8feed75a3e4b8b9de8b17b37efb4d5e0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/kogpt-j-350m/result_2023-10-29 14:08:59.json b/heegyu/kogpt-j-350m/result_2023-10-29 14:08:59.json new file mode 100644 index 0000000000000000000000000000000000000000..02ce13259303bd6723e3d234d00911672510aeb4 --- /dev/null +++ b/heegyu/kogpt-j-350m/result_2023-10-29 14:08:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.197098976109215, + "acc_stderr": 0.01162504766988062, + "acc_norm": 0.25426621160409557, + "acc_norm_stderr": 0.01272499994515774 + }, + "harness|ko_hellaswag|10": { + "acc": 0.29864568810993825, + "acc_stderr": 0.00456728777570055, + "acc_norm": 0.3429595698068114, + "acc_norm_stderr": 0.0047372796910361975 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.03446296217088427, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.03446296217088427 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2656449553001277, + "acc_stderr": 0.015794302487888722, + "acc_norm": 0.2656449553001277, + "acc_norm_stderr": 0.015794302487888722 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2170212765957447, + "acc_stderr": 0.026947483121496238, + "acc_norm": 0.2170212765957447, + "acc_norm_stderr": 0.026947483121496238 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818777, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818777 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 
0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3487179487179487, + "acc_stderr": 0.024162780284017724, + "acc_norm": 0.3487179487179487, + "acc_norm_stderr": 0.024162780284017724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23870967741935484, + "acc_stderr": 0.02425107126220884, + "acc_norm": 0.23870967741935484, + "acc_norm_stderr": 0.02425107126220884 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.027601921381417583, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.027601921381417583 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641143, + "acc_norm": 
0.2254335260115607, + "acc_norm_stderr": 0.03186209851641143 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2191358024691358, + "acc_stderr": 0.023016705640262185, + "acc_norm": 0.2191358024691358, + "acc_norm_stderr": 0.023016705640262185 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.02951928261681727, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.02951928261681727 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.024288619466046112, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.024288619466046112 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22712418300653595, + "acc_stderr": 0.01694985327921237, + "acc_norm": 0.22712418300653595, + "acc_norm_stderr": 0.01694985327921237 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902006, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 
0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.010946570966348787, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.010946570966348787 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.20098039215686275, + "acc_stderr": 0.028125972265654373, + "acc_norm": 0.20098039215686275, + "acc_norm_stderr": 0.028125972265654373 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766379, + "mc2": 0.4573493572313282, + "mc2_stderr": 0.015233367138630003 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29043683589138136, + "acc_stderr": 0.01560760256981463, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.016977101932601532 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/kogpt-j-350m", + "model_sha": "4020a790a09b76074102be8fc9013d58bcdaf385", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/kogpt-j-base/result_2023-10-29 14:09:11.json b/heegyu/kogpt-j-base/result_2023-10-29 14:09:11.json new file mode 100644 index 0000000000000000000000000000000000000000..c1333a6b2e24b4e1a4540455370ca39bc001e69d --- /dev/null +++ b/heegyu/kogpt-j-base/result_2023-10-29 14:09:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20563139931740615, + "acc_stderr": 0.011810745260742581, + "acc_norm": 0.24573378839590443, + "acc_norm_stderr": 0.012581033453730107 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28540131447918743, + "acc_stderr": 0.0045068240943332985, + "acc_norm": 0.3209520015933081, + "acc_norm_stderr": 0.004658882929099508 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245233, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245233 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26181353767560667, + "acc_stderr": 0.015720838678445266, + "acc_norm": 0.26181353767560667, + "acc_norm_stderr": 0.015720838678445266 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.22127659574468084, + "acc_stderr": 0.02713634960242405, + "acc_norm": 0.22127659574468084, + "acc_norm_stderr": 0.02713634960242405 + }, 
+ "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.035294868015111155, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.035294868015111155 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.242152466367713, + "acc_stderr": 0.028751392398694755, + "acc_norm": 0.242152466367713, + "acc_norm_stderr": 0.028751392398694755 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467766, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467766 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.19696969696969696, + "acc_stderr": 0.02833560973246335, + "acc_norm": 0.19696969696969696, + "acc_norm_stderr": 0.02833560973246335 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2, + "acc_stderr": 0.0333333333333333, + "acc_norm": 0.2, + "acc_norm_stderr": 0.0333333333333333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136098, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136098 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.024121125416941187, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.024121125416941187 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.024618298195866507, + "acc_norm": 0.2, + "acc_norm_stderr": 0.024618298195866507 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622841, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622841 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, 
+ "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165581, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165581 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.023445826276545543, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.023445826276545543 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.032910995786157714, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.032910995786157714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.023576881744005723, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.023576881744005723 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518754, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518754 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.01827257581023186, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.01827257581023186 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790606, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21241830065359477, + "acc_stderr": 0.02342037547829613, + "acc_norm": 0.21241830065359477, + "acc_norm_stderr": 0.02342037547829613 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.01747948700136476, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.01747948700136476 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266726, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266726 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859919, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859919 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.02993534270787775, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.02993534270787775 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2, + "acc_stderr": 0.02560737598657916, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02560737598657916 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.010946570966348783, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.010946570966348783 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083292, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083292 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.01522589934082682, + "mc2": 0.4666916578437702, + "mc2_stderr": 0.015201094715829425 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3412042502951594, + "acc_stderr": 0.016300368742137302, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.017185069732676517 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, 
+ "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/kogpt-j-base", + "model_sha": "212ebff345958e108fc47ae0daa892328ca6ece2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/llama-2-ko-7b-chat/result_2023-09-27 06:17:19.json b/heegyu/llama-2-ko-7b-chat/result_2023-09-27 06:17:19.json new file mode 100644 index 0000000000000000000000000000000000000000..b7b6bdff530fee2ae4e5cbc3f394dbf0030c2050 --- /dev/null +++ b/heegyu/llama-2-ko-7b-chat/result_2023-09-27 06:17:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.013491429517292038, + "acc_norm": 0.34726962457337884, + "acc_norm_stderr": 0.013913034529620439 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37004580760804623, + "acc_stderr": 0.004818298991012552, + "acc_norm": 0.47231627165903206, + "acc_norm_stderr": 0.004982127315605219 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.03401052620104088, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.03401052620104088 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2988505747126437, + "acc_stderr": 0.016369256815093127, + "acc_norm": 0.2988505747126437, + "acc_norm_stderr": 0.016369256815093127 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617724, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617724 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.034843315926805875, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.034843315926805875 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.02685882587948855, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.02685882587948855 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.029605103217038332, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.029605103217038332 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.038073871163060866, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.038073871163060866 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713547, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713547 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.03831226048850333, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.03831226048850333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714506, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714506 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.02665353159671549, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.02665353159671549 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.02093244577446317, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.02093244577446317 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.046166311118017125, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.046166311118017125 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.025189006660212385, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.025189006660212385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.030236389942173106, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.030236389942173106 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 
0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3482587064676617, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.3482587064676617, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.024685316867257806, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.024685316867257806 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.02563082497562135, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.02563082497562135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29541284403669726, + "acc_stderr": 0.019560619182976, + "acc_norm": 0.29541284403669726, + "acc_norm_stderr": 0.019560619182976 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.0312984318574381, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.0312984318574381 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046644, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046644 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 
0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3022875816993464, + "acc_stderr": 0.018579232711113877, + "acc_norm": 0.3022875816993464, + "acc_norm_stderr": 0.018579232711113877 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.0420327729146776, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.0420327729146776 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.02746740180405799, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.02746740180405799 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.17279411764705882, + "acc_stderr": 0.022966067585581788, + "acc_norm": 0.17279411764705882, + "acc_norm_stderr": 0.022966067585581788 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2163265306122449, + "acc_stderr": 0.026358916334904028, + "acc_norm": 0.2163265306122449, + "acc_norm_stderr": 0.026358916334904028 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3080168776371308, + "acc_stderr": 0.030052389335605695, + "acc_norm": 0.3080168776371308, + "acc_norm_stderr": 0.030052389335605695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771312, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373618, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373618 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041867, + "mc2": 0.3946101299678252, + "mc2_stderr": 0.01496139592173614 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.19008264462809918, + "acc_stderr": 0.013489827742736766, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.015588800386053557 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/llama-2-ko-7b-chat", + "model_sha": "98096a3f4d095e42ba10daec38ad329d9576f4cd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/llama-2-koen-13b-OKI-v20231124-1e-5/result_2023-11-28 06:34:03.json b/heegyu/llama-2-koen-13b-OKI-v20231124-1e-5/result_2023-11-28 06:34:03.json new file mode 100644 index 0000000000000000000000000000000000000000..6166c7a8998d9a87d53ca8945cab7e3e8c280aa3 --- /dev/null +++ b/heegyu/llama-2-koen-13b-OKI-v20231124-1e-5/result_2023-11-28 06:34:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.01423008476191047, + "acc_norm": 0.44795221843003413, + "acc_norm_stderr": 0.014532011498211667 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42620991834295957, + "acc_stderr": 0.004935143791573814, + "acc_norm": 0.5728938458474407, + "acc_norm_stderr": 0.004936470085238486 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + 
}, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.017829131764287184, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.017829131764287184 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102318, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102318 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009798, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009798 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972602, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960717, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960717 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.03587014986075658, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.03587014986075658 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + 
"acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779207, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779207 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353603, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353603 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.0318421386668758, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.0318421386668758 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3044328552803129, + "acc_stderr": 0.011752877592597579, + "acc_norm": 0.3044328552803129, + "acc_norm_stderr": 0.011752877592597579 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842885, + "mc2": 0.4422768606291578, + "mc2_stderr": 0.01506617476188256 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49940968122786306, + 
"acc_stderr": 0.01719034212344866, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.01705263355985607 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/llama-2-koen-13b-OKI-v20231124-1e-5", + "model_sha": "8a9a4c042bebc53d1e3fee972cb49752ddabda95", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/polyglot-ko-1.3b-chat/result_2023-10-14 15:59:35.json b/heegyu/polyglot-ko-1.3b-chat/result_2023-10-14 15:59:35.json new file mode 100644 index 0000000000000000000000000000000000000000..e50702301673ba18f5ea33470f95222714b44b14 --- /dev/null +++ b/heegyu/polyglot-ko-1.3b-chat/result_2023-10-14 15:59:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.2295221843003413, + "acc_stderr": 0.012288926760890793, + "acc_norm": 0.27559726962457337, + "acc_norm_stderr": 0.013057169655761838 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3253335988846843, + "acc_stderr": 0.004675418774314241, + "acc_norm": 0.3995220075682135, + "acc_norm_stderr": 0.004887991225950282 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691583, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.03424042924691583 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2784163473818646, + "acc_stderr": 0.016028295188992455, + "acc_norm": 0.2784163473818646, + "acc_norm_stderr": 0.016028295188992455 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03591444084196969, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03591444084196969 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036843, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036843 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838752, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838752 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.031417842916639245, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.031417842916639245 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21076233183856502, + "acc_stderr": 0.027373095500540193, + "acc_norm": 0.21076233183856502, + "acc_norm_stderr": 0.027373095500540193 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.035954616117746904, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.035954616117746904 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124498, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124498 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.03068473711513537, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.03068473711513537 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.02345467488940429, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.02345467488940429 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + 
"acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.031618563353586114, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.031618563353586114 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335137, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335137 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.027236013946196687, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.027236013946196687 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241235, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.27860696517412936, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.27860696517412936, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.03496101481191179, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.03496101481191179 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633345, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633345 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21098265895953758, + "acc_stderr": 0.021966309947043117, + "acc_norm": 0.21098265895953758, + "acc_norm_stderr": 0.021966309947043117 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25308641975308643, + "acc_stderr": 0.024191808600713002, + "acc_norm": 0.25308641975308643, + "acc_norm_stderr": 0.024191808600713002 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + 
"acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.20550458715596331, + "acc_stderr": 0.017324352325016015, + "acc_norm": 0.20550458715596331, + "acc_norm_stderr": 0.017324352325016015 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.02526169121972948, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.02526169121972948 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591206, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591206 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.03064360707167709, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.03064360707167709 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.20261437908496732, + "acc_stderr": 0.01626105528374612, + "acc_norm": 0.20261437908496732, + "acc_norm_stderr": 0.01626105528374612 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631157, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631157 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.02993534270787775, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.02993534270787775 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.025991117672813292, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.025991117672813292 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994927, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994927 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2379400260756193, + "acc_stderr": 0.010875700787694243, + "acc_norm": 0.2379400260756193, + "acc_norm_stderr": 0.010875700787694243 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501933, + "acc_norm": 
0.24509803921568626, + "acc_norm_stderr": 0.030190282453501933 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.034277431758165236, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.034277431758165236 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.014948812679062137, + "mc2": 0.4105215346532836, + "mc2_stderr": 0.015140606421446082 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29043683589138136, + "acc_stderr": 0.01560760256981463, + "acc_norm": 0.3447461629279811, + "acc_norm_stderr": 0.016340649905418697 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/polyglot-ko-1.3b-chat", + "model_sha": "156656e44a70bc0905777f682f16237758d16b16", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + 
"num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/polyglot-ko-3.8b-chat/result_2023-10-14 16:00:21.json b/heegyu/polyglot-ko-3.8b-chat/result_2023-10-14 16:00:21.json new file mode 100644 index 0000000000000000000000000000000000000000..eb9ffc2771dcbbaf45242beb1ecbc240ab06d886 --- /dev/null +++ b/heegyu/polyglot-ko-3.8b-chat/result_2023-10-14 16:00:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2568259385665529, + "acc_stderr": 0.012766923794116801, + "acc_norm": 0.30887372013651876, + "acc_norm_stderr": 0.013501770929344003 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35172276438956385, + "acc_stderr": 0.004765320784902128, + "acc_norm": 0.4396534554869548, + "acc_norm_stderr": 0.004953305461311753 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393161, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393161 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2656449553001277, + "acc_stderr": 0.01579430248788873, + "acc_norm": 0.2656449553001277, + "acc_norm_stderr": 0.01579430248788873 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174022, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174022 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.02655698211783875, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.02655698211783875 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.025218040373410622, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.025218040373410622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217487, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217487 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.03831226048850333, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.03831226048850333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380558, + "acc_norm": 
0.24369747899159663, + "acc_norm_stderr": 0.027886828078380558 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132354, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132354 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764833, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106734, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106734 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.17272727272727273, + "acc_stderr": 0.03620691833929217, + "acc_norm": 0.17272727272727273, + "acc_norm_stderr": 0.03620691833929217 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.03076944496729602, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729602 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.03214737302029468, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.03214737302029468 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566016, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.023703099525258158, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.023703099525258158 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 
0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886338, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886338 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.017883188134667192, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.017883188134667192 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03324708911809117, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.03324708911809117 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.26838235294117646, + "acc_stderr": 0.02691748122437723, + "acc_norm": 0.26838235294117646, + "acc_norm_stderr": 0.02691748122437723 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2979591836734694, + "acc_stderr": 0.029279567411065684, + "acc_norm": 0.2979591836734694, + "acc_norm_stderr": 0.029279567411065684 + }, 
+ "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25945241199478486, + "acc_stderr": 0.011195262076350309, + "acc_norm": 0.25945241199478486, + "acc_norm_stderr": 0.011195262076350309 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522517, + "mc2": 0.42818983286182555, + "mc2_stderr": 0.015309048799107149 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.01547327158398843, + "acc_norm": 0.3412042502951594, + "acc_norm_stderr": 0.016300368742137306 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/polyglot-ko-3.8b-chat", + "model_sha": "0e8739e22d15d44f6196fb281895856a0372564a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/polyglot-ko-5.8b-chat/result_2023-10-14 16:01:20.json b/heegyu/polyglot-ko-5.8b-chat/result_2023-10-14 16:01:20.json new file mode 100644 index 0000000000000000000000000000000000000000..8768999ef3aad47f2e8bb54b795000ac3b19deef --- /dev/null +++ b/heegyu/polyglot-ko-5.8b-chat/result_2023-10-14 16:01:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2815699658703072, + "acc_stderr": 0.013143376735009007, + "acc_norm": 0.3165529010238908, + "acc_norm_stderr": 0.01359243151906808 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35899223262298346, + "acc_stderr": 0.004787245377967104, + "acc_norm": 0.4522007568213503, + "acc_norm_stderr": 0.004966928094797578 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23391812865497075, + "acc_stderr": 0.03246721765117827, + "acc_norm": 0.23391812865497075, + "acc_norm_stderr": 0.03246721765117827 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1553398058252427, + "acc_stderr": 0.03586594738573974, + "acc_norm": 0.1553398058252427, + "acc_norm_stderr": 0.03586594738573974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.29757343550446996, + "acc_stderr": 0.016349111912909418, + "acc_norm": 0.29757343550446996, + "acc_norm_stderr": 0.016349111912909418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678316, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678316 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.030017554471880557, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880557 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134988, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134988 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.03008862949021749, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.03008862949021749 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342853, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342853 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.32564102564102565, + "acc_stderr": 0.02375966576741229, + "acc_norm": 0.32564102564102565, + "acc_norm_stderr": 0.02375966576741229 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553883, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553883 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02860595370200425, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02860595370200425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695245, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695245 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.038313051408846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.038313051408846 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.27860696517412936, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.27860696517412936, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198823, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198823 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_us_foreign_policy|5": { 
+ "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.20520231213872833, + "acc_stderr": 0.021742519835276284, + "acc_norm": 0.20520231213872833, + "acc_norm_stderr": 0.021742519835276284 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.025407197798890162, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.025407197798890162 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916646, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916646 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26055045871559634, + "acc_stderr": 0.01881918203485007, + "acc_norm": 0.26055045871559634, + "acc_norm_stderr": 0.01881918203485007 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790606, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.024404394928087873, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.024404394928087873 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.039849796533028704, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.039849796533028704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.01690661592728815, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.01690661592728815 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902013, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902013 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.044939490686135404, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.044939490686135404 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293646, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293646 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + 
"acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.02747227447323382, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.02747227447323382 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3551020408163265, + "acc_stderr": 0.03063565515038764, + "acc_norm": 0.3551020408163265, + "acc_norm_stderr": 0.03063565515038764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.029818024749753102, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.029818024749753102 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25488917861799215, + "acc_stderr": 0.011130509812662979, + "acc_norm": 0.25488917861799215, + "acc_norm_stderr": 0.011130509812662979 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693285, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693285 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156475, + "mc2": 0.4027649410811347, + "mc2_stderr": 0.014993381048704797 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.01547327158398843, + "acc_norm": 0.3412042502951594, + "acc_norm_stderr": 0.0163003687421373 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/polyglot-ko-5.8b-chat", + "model_sha": "58d274dbd13bd1829a6bd17d90c493bd9039564f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/zephyr-7b-beta-KOR-OpenOrca-Platypus-1e-5/result_2023-11-27 11:04:41.json b/heegyu/zephyr-7b-beta-KOR-OpenOrca-Platypus-1e-5/result_2023-11-27 11:04:41.json new file mode 100644 index 0000000000000000000000000000000000000000..881b0a988b42f89e340a92bd4e7c278f5d95c009 --- /dev/null +++ b/heegyu/zephyr-7b-beta-KOR-OpenOrca-Platypus-1e-5/result_2023-11-27 11:04:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3242320819112628, + "acc_stderr": 0.01367881039951882, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759079 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36675960963951404, + "acc_stderr": 0.004809352075008939, + "acc_norm": 0.47171878111929894, + "acc_norm_stderr": 0.004981793089848261 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.039992628766177235, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.039992628766177235 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071722, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071722 + }, + "harness|ko_mmlu_human_sexuality|5": { + 
"acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684973, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684973 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507748, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507748 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.04760548821460325, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.04760548821460325 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.03528131472933606, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.03528131472933606 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.3412698412698413, + "acc_stderr": 0.024419234966819074, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.024419234966819074 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206188, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206188 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.021406952688151588, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.021406952688151588 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.01929196189506638, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.01929196189506638 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467763, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467763 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 
0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29720670391061454, + "acc_stderr": 0.015285313353641595, + "acc_norm": 0.29720670391061454, + "acc_norm_stderr": 0.015285313353641595 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464622, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464622 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.032002553478937816, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.032002553478937816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3155149934810952, + "acc_stderr": 0.011869184843058638, + "acc_norm": 0.3155149934810952, + "acc_norm_stderr": 0.011869184843058638 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512567, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512567 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.01598359510181139, + "mc2": 0.4845124425990411, + "mc2_stderr": 0.01549474022798638 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4155844155844156, + "acc_stderr": 0.016943586313076568, + "acc_norm": 0.44510035419126326, + "acc_norm_stderr": 0.017086417431005467 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/zephyr-7b-beta-KOR-OpenOrca-Platypus-1e-5", + "model_sha": "e2a745cc691255a55c3880b49b374d27305faac3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hongzoh/Yi-Ko-6B_Open-Platypus/result_2023-12-29 06:19:40.json b/hongzoh/Yi-Ko-6B_Open-Platypus/result_2023-12-29 06:19:40.json new file mode 100644 index 0000000000000000000000000000000000000000..9bb75b721a5d9aa027a28cbd046cce7acf623dfa --- /dev/null +++ b/hongzoh/Yi-Ko-6B_Open-Platypus/result_2023-12-29 06:19:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3361774744027304, + "acc_stderr": 0.013804855026205763, + "acc_norm": 0.4044368600682594, + "acc_norm_stderr": 0.014342036483436174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.392850029874527, + "acc_stderr": 0.004873858323840787, + "acc_norm": 0.5269866560446126, + "acc_norm_stderr": 0.00498250819858427 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5325670498084292, + "acc_stderr": 0.017841995750520867, + "acc_norm": 0.5325670498084292, + "acc_norm_stderr": 0.017841995750520867 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 
0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.034812853382329645, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.034812853382329645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962956, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 
0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920938, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920938 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.041124909746707884, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.041124909746707884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.616580310880829, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.616580310880829, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6146788990825688, + "acc_stderr": 0.02086585085279411, + "acc_norm": 0.6146788990825688, + "acc_norm_stderr": 0.02086585085279411 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061173, + "acc_norm": 0.4068627450980392, + 
"acc_norm_stderr": 0.019873802005061173 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639886, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33854748603351953, + "acc_stderr": 0.01582670009648135, + "acc_norm": 0.33854748603351953, + "acc_norm_stderr": 0.01582670009648135 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.030472526026726492, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.030472526026726492 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.318122555410691, + "acc_stderr": 0.011895407281104095, + "acc_norm": 0.318122555410691, + "acc_norm_stderr": 0.011895407281104095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.4177084248121788, + "mc2_stderr": 0.014895946713066507 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4946871310507674, + "acc_stderr": 0.01718938362722969, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972205 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hongzoh/Yi-Ko-6B_Open-Platypus", + "model_sha": "3096318116f2d7da1bd293ad21888ac91ec5cc46", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/huggyllama/llama-13b/result_2023-09-27 04:58:53.json b/huggyllama/llama-13b/result_2023-09-27 04:58:53.json new file mode 100644 index 0000000000000000000000000000000000000000..952a5c4fb8f5a10f58d9fb8fd6cb163caf39ac1c --- /dev/null +++ b/huggyllama/llama-13b/result_2023-09-27 04:58:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2098976109215017, + "acc_stderr": 0.01190054874804745, + "acc_norm": 0.2593856655290102, + "acc_norm_stderr": 0.012808273573927092 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3014339772953595, + "acc_stderr": 0.004579429184835869, + "acc_norm": 0.3571001792471619, + "acc_norm_stderr": 0.004781654610857135 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.036155076303109344, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.036155076303109344 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.04453254836326467, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.04453254836326467 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3065134099616858, + "acc_stderr": 0.016486952893041515, + "acc_norm": 0.3065134099616858, + "acc_norm_stderr": 0.016486952893041515 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + 
"acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.03036358219723816, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.03036358219723816 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370519, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370519 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3536977491961415, + "acc_stderr": 0.02715520810320086, + "acc_norm": 0.3536977491961415, + "acc_norm_stderr": 0.02715520810320086 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.27802690582959644, + "acc_stderr": 0.03006958487449405, + "acc_norm": 0.27802690582959644, + "acc_norm_stderr": 0.03006958487449405 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836556, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836556 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.03921545312467121, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.03921545312467121 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886838, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886838 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33076923076923076, + "acc_stderr": 0.0238547956809711, + "acc_norm": 0.33076923076923076, + "acc_norm_stderr": 0.0238547956809711 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.031089826002937523, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.031089826002937523 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33548387096774196, + "acc_stderr": 0.02686020644472434, + "acc_norm": 0.33548387096774196, + "acc_norm_stderr": 0.02686020644472434 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.03265903381186195, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.03265903381186195 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670716, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670716 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.21851851851851853, + "acc_stderr": 0.025195752251823793, + "acc_norm": 0.21851851851851853, + "acc_norm_stderr": 0.025195752251823793 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3283582089552239, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.3283582089552239, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031705, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031705 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.025722802200895817, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.025722802200895817 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.025702640260603753, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.025702640260603753 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089117, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089117 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29724770642201837, + "acc_stderr": 0.019595707224643533, + "acc_norm": 0.29724770642201837, + "acc_norm_stderr": 0.019595707224643533 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.02778014120702335, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.02778014120702335 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.45454545454545453, + 
"acc_stderr": 0.04545454545454546, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04545454545454546 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137282, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137282 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053446, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.018217269552053446 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880585, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880585 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.026431329870789513, + "acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.026431329870789513 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.029696338713422893, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.029696338713422893 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771312, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.031980016601150706, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.031980016601150706 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03588624800091709, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091709 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.01555077833284288, + "mc2": 0.43560981343267496, + "mc2_stderr": 0.01587676917939091 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22077922077922077, + "acc_stderr": 0.014260152803540045, + "acc_norm": 0.3435655253837072, + "acc_norm_stderr": 0.016327334806429145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 
1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "huggyllama/llama-13b", + "model_sha": "bf57045473f207bb1de1ed035ace226f4d9f9bba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/huggyllama/llama-7b/result_2023-09-28 00:26:14.json b/huggyllama/llama-7b/result_2023-09-28 00:26:14.json new file mode 100644 index 0000000000000000000000000000000000000000..f72099bdea5e97287e4049d569e0ee0fb62f8af3 --- /dev/null +++ b/huggyllama/llama-7b/result_2023-09-28 00:26:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2175767918088737, + "acc_stderr": 0.012057262020972508, + "acc_norm": 0.2525597269624573, + "acc_norm_stderr": 0.012696728980207708 + }, + "harness|ko_hellaswag|10": { + "acc": 0.29197371041625175, + "acc_stderr": 0.004537410615572941, + "acc_norm": 0.3343955387373033, + "acc_norm_stderr": 0.004708145393411397 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0330140594698725, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0330140594698725 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, 
+ "acc_stderr": 0.040580420156460344, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.040580420156460344 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2554278416347382, + "acc_stderr": 0.01559495538445577, + "acc_norm": 0.2554278416347382, + "acc_norm_stderr": 0.01559495538445577 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.02895734278834235, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.02895734278834235 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.0368078369072758, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.0368078369072758 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2347266881028939, + "acc_stderr": 0.024071805887677048, + "acc_norm": 0.2347266881028939, + "acc_norm_stderr": 0.024071805887677048 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19282511210762332, + "acc_stderr": 0.026478240960489365, + "acc_norm": 0.19282511210762332, + "acc_norm_stderr": 0.026478240960489365 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.038808483010823944, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.038808483010823944 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361266, + "acc_norm": 0.23949579831932774, + "acc_norm_stderr": 0.027722065493361266 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.021606294494647727, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.021606294494647727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030049, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.04524596007030049 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0317852971064275, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0317852971064275 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885193, + "acc_norm": 
0.27419354838709675, + "acc_norm_stderr": 0.025378139970885193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891172, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891172 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.025497532639609546, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.025497532639609546 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696525, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916718, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916718 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.024257901705323374, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.024257901705323374 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.025251173936495033, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.025251173936495033 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909906, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909906 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.326605504587156, + "acc_stderr": 0.020106990889937303, + "acc_norm": 0.326605504587156, + "acc_norm_stderr": 0.020106990889937303 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 
0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.018718067052623227, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.018718067052623227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180844, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180844 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.0356236785009539, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.0356236785009539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826371, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826371 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.025336848563332372, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.025336848563332372 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788163, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842555, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842555 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2542372881355932, + "acc_stderr": 0.011121129007840685, + "acc_norm": 0.2542372881355932, + "acc_norm_stderr": 0.011121129007840685 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.02955429260569506, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.02955429260569506 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237272, + "mc2": 0.4405577919486417, + "mc2_stderr": 0.01601590664012013 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27390791027154665, + "acc_stderr": 0.015332499474791022, + "acc_norm": 
0.3707201889020071, + "acc_norm_stderr": 0.0166058012892126 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "huggyllama/llama-7b", + "model_sha": "8416d3fefb0cb3ff5775a7b13c1692d10ff1aa16", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwanhe/Big_Minirecord02/result_2023-11-19 23:35:28.json b/hwanhe/Big_Minirecord02/result_2023-11-19 23:35:28.json new file mode 100644 index 0000000000000000000000000000000000000000..26c256d55a68728f806d816817c46290cac05b18 --- /dev/null +++ b/hwanhe/Big_Minirecord02/result_2023-11-19 23:35:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3873720136518771, + "acc_stderr": 0.01423587248790987, + "acc_norm": 
0.43430034129692835, + "acc_norm_stderr": 0.014484703048857355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3999203345947023, + "acc_stderr": 0.0048888050031030755, + "acc_norm": 0.5129456283608843, + "acc_norm_stderr": 0.004988108663179773 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.038237270928823064, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.038237270928823064 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.017874698667491355, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.017874698667491355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.032436186361081025, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.032436186361081025 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.02837327096106942, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.02837327096106942 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.02530295889085015, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.02530295889085015 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066475, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066475 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268815, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268815 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983056, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239004 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.03919415545048411, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048411 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5082568807339449, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.5082568807339449, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138282, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138282 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.01945076843250551, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.01945076843250551 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29832402234636873, + "acc_stderr": 0.01530184004512928, + "acc_norm": 0.29832402234636873, + "acc_norm_stderr": 0.01530184004512928 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763125, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763125 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.032230171959375976, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.032230171959375976 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30834419817470665, + "acc_stderr": 0.011794833789715329, + "acc_norm": 0.30834419817470665, + "acc_norm_stderr": 0.011794833789715329 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.42843883250666265, + "mc2_stderr": 0.015479949381497765 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41912632821723733, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.01717567127983645 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwanhe/Big_Minirecord02", + "model_sha": "32f6a2427781870bac71410f3b68407d4db6ce0d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline 
at end of file diff --git a/hwanhe/Mistral_sum_test01/result_2023-11-07 02:58:51.json b/hwanhe/Mistral_sum_test01/result_2023-11-07 02:58:51.json new file mode 100644 index 0000000000000000000000000000000000000000..9c9fb675d8d032fc6696187a6e12463c2e442369 --- /dev/null +++ b/hwanhe/Mistral_sum_test01/result_2023-11-07 02:58:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27303754266211605, + "acc_stderr": 0.013019332762635734, + "acc_norm": 0.3046075085324232, + "acc_norm_stderr": 0.01344952210993249 + }, + "harness|ko_hellaswag|10": { + "acc": 0.31935869348735313, + "acc_stderr": 0.004652753439460146, + "acc_norm": 0.3890659231228839, + "acc_norm_stderr": 0.004865419468213886 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.03815827365913235, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.03815827365913235 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38697318007662834, + "acc_stderr": 0.017417138059440153, + "acc_norm": 0.38697318007662834, + "acc_norm_stderr": 0.017417138059440153 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.028957342788342347, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.028957342788342347 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.39228295819935693, + "acc_stderr": 0.027731258647012, + "acc_norm": 0.39228295819935693, + "acc_norm_stderr": 0.027731258647012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36134453781512604, + "acc_stderr": 0.03120469122515002, + "acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.03120469122515002 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.3717948717948718, + "acc_stderr": 0.024503472557110946, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.024503472557110946 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.02770935967503249, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.02770935967503249 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651047, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.0295822451283843, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.0295822451283843 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145665, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145665 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4925373134328358, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.4925373134328358, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247077, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247077 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307695, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307695 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261746, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261746 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02712511551316687, + "acc_norm": 
0.3888888888888889, + "acc_norm_stderr": 0.02712511551316687 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3626943005181347, + "acc_stderr": 0.03469713791704372, + "acc_norm": 0.3626943005181347, + "acc_norm_stderr": 0.03469713791704372 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3467889908256881, + "acc_stderr": 0.020406097104093027, + "acc_norm": 0.3467889908256881, + "acc_norm_stderr": 0.020406097104093027 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092487, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092487 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265015, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265015 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372434, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372434 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928006, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928006 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4430379746835443, + "acc_stderr": 0.032335327775334835, + "acc_norm": 
0.4430379746835443, + "acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2796610169491525, + "acc_stderr": 0.011463397393861957, + "acc_norm": 0.2796610169491525, + "acc_norm_stderr": 0.011463397393861957 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869326 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.0372820699868265, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.0372820699868265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.4365926945057216, + "mc2_stderr": 0.01582664283045154 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3778040141676505, + "acc_stderr": 0.016669082840694967, + "acc_norm": 0.41440377804014167, + "acc_norm_stderr": 0.01693658338394362 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwanhe/Mistral_sum_test01", + "model_sha": "de97843340ab3e732f4ba05ecd22727d76b6c628", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwanhe/Mistral_test01/result_2023-10-30 07:55:50.json b/hwanhe/Mistral_test01/result_2023-10-30 07:55:50.json new file mode 100644 index 0000000000000000000000000000000000000000..ea30798c6425786a00c2c77b86d755ecc1f31f0f --- /dev/null +++ b/hwanhe/Mistral_test01/result_2023-10-30 07:55:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3361774744027304, + "acc_stderr": 0.013804855026205765, + "acc_norm": 0.3822525597269625, + "acc_norm_stderr": 0.014200454049979295 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3701453893646684, + "acc_stderr": 0.004818566366066918, + "acc_norm": 0.4788886675960964, + "acc_norm_stderr": 0.004985331652408345 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.04931801994220416, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.04931801994220416 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.017870847506081717, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.017870847506081717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894255, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894255 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + 
"acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.02920254015343119, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.02920254015343119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389174, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389174 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.0250437573185202, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.0250437573185202 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + 
}, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.02681771813034892, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.02681771813034892 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833946, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833946 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45688073394495415, + "acc_stderr": 0.02135745878522621, + "acc_norm": 0.45688073394495415, + "acc_norm_stderr": 0.02135745878522621 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375387, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375387 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115892, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115892 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.01473692638376197, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.01473692638376197 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394812, + "mc2": 0.46466595944130523, + "mc2_stderr": 0.015564409326931861 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4781582054309327, + "acc_stderr": 0.017173944474294375, + "acc_norm": 0.5147579693034239, + "acc_norm_stderr": 0.017182864434998564 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwanhe/Mistral_test01", + "model_sha": "cda9c485214eb8845c47321ef32126ce6622707d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwanhe/Mistral_test02/result_2023-10-31 10:01:31.json b/hwanhe/Mistral_test02/result_2023-10-31 10:01:31.json new file mode 100644 index 0000000000000000000000000000000000000000..a756a2e0ace9c1ce1a4068b68db5bd6c2f6e6359 --- /dev/null +++ b/hwanhe/Mistral_test02/result_2023-10-31 10:01:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38310580204778155, + "acc_stderr": 0.014206472661672883, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303022 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39454291973710415, + "acc_stderr": 0.004877534215987088, + "acc_norm": 0.5135431189006174, + "acc_norm_stderr": 0.004987950663406535 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48659003831417624, + "acc_stderr": 0.01787353173651041, + "acc_norm": 0.48659003831417624, + "acc_norm_stderr": 0.01787353173651041 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.031489558297455304, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.031489558297455304 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.02836504154256457, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.02836504154256457 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + 
"acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767762, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767762 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162933, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162933 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547307, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547307 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159663, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159663 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.025305906241590632, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.025305906241590632 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 
0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45321100917431195, + "acc_stderr": 0.021343255165546037, + "acc_norm": 0.45321100917431195, + "acc_norm_stderr": 0.021343255165546037 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3937908496732026, + "acc_stderr": 0.01976621199107306, + "acc_norm": 0.3937908496732026, + "acc_norm_stderr": 0.01976621199107306 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22569832402234638, + "acc_stderr": 0.013981395058455057, + "acc_norm": 0.22569832402234638, + "acc_norm_stderr": 0.013981395058455057 + 
}, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.027472274473233818, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.027472274473233818 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3396349413298566, + "acc_stderr": 0.012095592506931974, + "acc_norm": 0.3396349413298566, + "acc_norm_stderr": 0.012095592506931974 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.4624714536105945, + "mc2_stderr": 0.015555617186203954 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48288075560802834, + "acc_stderr": 0.017180275246085622, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwanhe/Mistral_test02", + "model_sha": "28ec5016b4e828b0ba127543e9e2931a587a0652", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwanhe/Mistral_test03/result_2023-11-01 23:53:49.json b/hwanhe/Mistral_test03/result_2023-11-01 23:53:49.json new file mode 100644 index 0000000000000000000000000000000000000000..9d29cab639cd845190dff451c8fd2e6d1945bc7c --- /dev/null +++ b/hwanhe/Mistral_test03/result_2023-11-01 23:53:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111728, + "acc_norm": 0.42406143344709896, + "acc_norm_stderr": 0.014441889627464392 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39165504879506075, + "acc_stderr": 0.0048712266293464, + "acc_norm": 0.5067715594503087, + "acc_norm_stderr": 0.004989323787413519 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48659003831417624, + "acc_stderr": 0.017873531736510396, + "acc_norm": 0.48659003831417624, + "acc_norm_stderr": 0.017873531736510396 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502737, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674064, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674064 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.036030385453603854, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.036030385453603854 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.034005985055990146 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155247, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155247 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666654, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666654 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5064220183486239, + "acc_stderr": 0.02143555482001308, + "acc_norm": 0.5064220183486239, + "acc_norm_stderr": 0.02143555482001308 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.028629305194003533, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.028629305194003533 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786682, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786682 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, 
+ "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925312, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925312 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031225, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031225 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.0321481463024037, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.0321481463024037 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.012002091666902312, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.012002091666902312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.4568705456080681, + "mc2_stderr": 0.01559510037840762 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.017185069732676528, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.01705775370216029 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwanhe/Mistral_test03", + "model_sha": "0fbb09941fd9a175f92d61159081a8cbd5428061", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwanhe/Mistral_test04/result_2023-11-05 22:29:58.json b/hwanhe/Mistral_test04/result_2023-11-05 22:29:58.json new file mode 100644 index 0000000000000000000000000000000000000000..7e0e97ff2bec2f1f2386e96a976cf44390c5aa57 --- /dev/null +++ b/hwanhe/Mistral_test04/result_2023-11-05 22:29:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3890784982935154, + "acc_stderr": 0.014247309976045607, + "acc_norm": 0.43856655290102387, + "acc_norm_stderr": 0.014500682618212862 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39543915554670384, + "acc_stderr": 0.004879455474663812, + "acc_norm": 0.514937263493328, + "acc_norm_stderr": 0.0049875542559818554 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.017874698667491355, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.017874698667491355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + 
"acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.025254485424799605, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.025254485424799605 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 
0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360385, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360385 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.01964380155792481, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.01964380155792481 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025445, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988633, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988633 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763126, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763126 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3285528031290743, + "acc_stderr": 0.011996027247502922, + "acc_norm": 0.3285528031290743, + "acc_norm_stderr": 0.011996027247502922 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.01600265148736101, + "mc2": 0.4599450727674709, + "mc2_stderr": 0.015606224187062706 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 
1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwanhe/Mistral_test04", + "model_sha": "6d205df368f10311a3220229fafc0dcf0668e446", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/SOLAR-10.7B-dpo-v0.1/result_2023-12-31 07:40:27.json b/hyeogi/SOLAR-10.7B-dpo-v0.1/result_2023-12-31 07:40:27.json new file mode 100644 index 0000000000000000000000000000000000000000..950ffb6c6d1ea4d8f521b5d9ccc4734a9ae0c6e3 --- /dev/null +++ b/hyeogi/SOLAR-10.7B-dpo-v0.1/result_2023-12-31 07:40:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4283276450511945, + "acc_stderr": 0.01446049636759902, + "acc_norm": 0.47952218430034127, + "acc_norm_stderr": 0.014599131353035017 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43696474805815577, + "acc_stderr": 0.004949969363017665, + "acc_norm": 0.594901414060944, + "acc_norm_stderr": 0.004899078300184254 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5964912280701754, + "acc_stderr": 0.03762738699917057, + "acc_norm": 0.5964912280701754, + "acc_norm_stderr": 0.03762738699917057 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6232439335887612, + "acc_stderr": 0.01732829290730306, + "acc_norm": 0.6232439335887612, + "acc_norm_stderr": 
0.01732829290730306 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.02773125864701199, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 0.02773125864701199 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465918, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465918 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5840336134453782, + "acc_stderr": 0.03201650100739611, + "acc_norm": 0.5840336134453782, + "acc_norm_stderr": 0.03201650100739611 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.02531764972644868, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.02531764972644868 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5548387096774193, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.5548387096774193, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700915, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700915 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.539622641509434, + "acc_stderr": 0.030676096599389188, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389188 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.031524391865554044, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.031524391865554044 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.025424835086924006, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.025424835086924006 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.026830805998952243, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.026830805998952243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.027513747284379424, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.027513747284379424 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6735751295336787, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.6735751295336787, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.04685473041907789, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.04685473041907789 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.636697247706422, + "acc_stderr": 0.020620603919625804, + "acc_norm": 0.636697247706422, + "acc_norm_stderr": 0.020620603919625804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.028358956313423545, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.028358956313423545 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + 
"acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.47875816993464054, + "acc_stderr": 0.020209572388600244, + "acc_norm": 0.47875816993464054, + "acc_norm_stderr": 0.020209572388600244 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.01513160884996375, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.01513160884996375 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4889705882352941, + "acc_stderr": 0.03036544647727568, + "acc_norm": 0.4889705882352941, + "acc_norm_stderr": 0.03036544647727568 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7215189873417721, + "acc_stderr": 0.029178682304842534, + "acc_norm": 0.7215189873417721, + "acc_norm_stderr": 0.029178682304842534 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39374185136897, + "acc_stderr": 0.012478532272564439, + "acc_norm": 0.39374185136897, + "acc_norm_stderr": 0.012478532272564439 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.033933885849584046, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.033933885849584046 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.037694303145125695, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.037694303145125695 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4320685434516524, + "mc1_stderr": 0.01734120239498825, + "mc2": 0.6097452426860501, + "mc2_stderr": 0.015682133857043576 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5832349468713105, + "acc_stderr": 0.016950489146108826, + "acc_norm": 0.6174734356552538, + "acc_norm_stderr": 0.016709165387228837 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/SOLAR-10.7B-dpo-v0.1", + "model_sha": "8e657d79a30c8030a2e5bebfc60425e6a849a5bc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/Yi-6b-dpo-v0.1/result_2023-12-05 03:43:34.json b/hyeogi/Yi-6b-dpo-v0.1/result_2023-12-05 03:43:34.json new file mode 100644 index 0000000000000000000000000000000000000000..e8a1455917626834ad619144ad8195be13a4c293 --- /dev/null +++ b/hyeogi/Yi-6b-dpo-v0.1/result_2023-12-05 03:43:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.013888816286782112, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398326 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3943437562238598, + "acc_stderr": 0.004877104939356235, + "acc_norm": 0.5223063134833699, + "acc_norm_stderr": 0.004984813391016212 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 
0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5440613026819924, + "acc_stderr": 0.01781040392543536, + "acc_norm": 0.5440613026819924, + "acc_norm_stderr": 0.01781040392543536 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.0348890161685273, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.0348890161685273 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106522, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 
0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173095, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173095 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504513, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504513 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.02386520683697259, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.02386520683697259 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.021004201260420075, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420075 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.041634530313028585, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.041634530313028585 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556044, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556044 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281515, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882622, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882622 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4742647058823529, + "acc_stderr": 0.03033257809455504, + "acc_norm": 0.4742647058823529, + "acc_norm_stderr": 0.03033257809455504 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.03164209487942941, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.03164209487942941 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3089960886571056, + "acc_stderr": 0.011801729777239249, + "acc_norm": 0.3089960886571056, + "acc_norm_stderr": 0.011801729777239249 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3598531211750306, + "mc1_stderr": 0.016801860466677126, + "mc2": 0.5402612898523886, + "mc2_stderr": 0.01538434298166149 + }, + "harness|ko_commongen_v2|2": { + 
"acc": 0.6033057851239669, + "acc_stderr": 0.016819438642971404, + "acc_norm": 0.6399055489964581, + "acc_norm_stderr": 0.01650368672044008 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/Yi-6b-dpo-v0.1", + "model_sha": "2e263aec3b4b3fa27baa420ce98448d4b3644632", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/Yi-6b-dpo-v0.2/result_2023-12-08 12:58:33.json b/hyeogi/Yi-6b-dpo-v0.2/result_2023-12-08 12:58:33.json new file mode 100644 index 0000000000000000000000000000000000000000..8623c94d57d2a1364168a50b622e30b90748183e --- /dev/null +++ b/hyeogi/Yi-6b-dpo-v0.2/result_2023-12-08 12:58:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.35238907849829354, + "acc_stderr": 0.013960142600598682, + "acc_norm": 0.41723549488054607, + "acc_norm_stderr": 0.01440982551840308 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3968333001394145, + "acc_stderr": 0.004882410029935438, + "acc_norm": 0.5295757817167894, + "acc_norm_stderr": 0.004981044370530806 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5696040868454662, + "acc_stderr": 0.01770586877629239, + "acc_norm": 0.5696040868454662, + "acc_norm_stderr": 0.01770586877629239 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.031565646822367836, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.031565646822367836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223264, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5462184873949579, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.5462184873949579, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 
0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.02920254015343118, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.02920254015343118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602841997, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602841997 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860807, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860807 + 
}, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6110091743119266, + "acc_stderr": 0.020902300887392866, + "acc_norm": 0.6110091743119266, + "acc_norm_stderr": 0.020902300887392866 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42320261437908496, + "acc_stderr": 0.019987809769482064, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.019987809769482064 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281285, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281285 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475353, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475353 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3213820078226858, + "acc_stderr": 0.011927581352265076, + "acc_norm": 0.3213820078226858, + "acc_norm_stderr": 0.011927581352265076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": 
{ + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3427172582619339, + "mc1_stderr": 0.016614949385347046, + "mc2": 0.5237635137263473, + "mc2_stderr": 0.015260079405506066 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6694214876033058, + "acc_stderr": 0.016173423298845694, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.0158405389325341 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/Yi-6b-dpo-v0.2", + "model_sha": "cfe3e81342b6bccf706170f85d0357f7017572fd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/hyeogi/Yi-6b-dpo-v0.3/result_2023-12-15 20:36:23.json b/hyeogi/Yi-6b-dpo-v0.3/result_2023-12-15 20:36:23.json new file mode 100644 index 0000000000000000000000000000000000000000..aeba9282d6842cad865757f62e3f26ec650668be --- /dev/null +++ b/hyeogi/Yi-6b-dpo-v0.3/result_2023-12-15 20:36:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407163, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39464250149372637, + "acc_stderr": 0.004877748536428437, + "acc_norm": 0.5307707627962557, + "acc_norm_stderr": 0.0049803234000310795 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 0.017844918090468547, + "acc_norm": 0.5312899106002554, + "acc_norm_stderr": 0.017844918090468547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237653, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 
0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768818, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768818 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.0344578996436275, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.0344578996436275 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376886, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376886 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.0278074900442762 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5981651376146789, + "acc_stderr": 0.021020106172997006, + "acc_norm": 0.5981651376146789, + "acc_norm_stderr": 0.021020106172997006 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557836, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557836 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.02858034106513829, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.02858034106513829 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.01992211568278668, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.01992211568278668 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115882, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31731843575418994, + "acc_stderr": 0.01556639263005703, + "acc_norm": 0.31731843575418994, + "acc_norm_stderr": 0.01556639263005703 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.48161764705882354, + "acc_stderr": 0.03035230339535196, + "acc_norm": 0.48161764705882354, + "acc_norm_stderr": 0.03035230339535196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.01211793999870587, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.01211793999870587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35862913096695226, + "mc1_stderr": 0.016789289499502025, + "mc2": 0.5302526106032021, + "mc2_stderr": 0.01564565803995267 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6292798110979929, + "acc_stderr": 0.01660580128921261, + "acc_norm": 0.6646989374262101, + "acc_norm_stderr": 0.016230981232989813 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, 
+ "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/Yi-6b-dpo-v0.3", + "model_sha": "590fef1d72c0bc3b406410739707b3247ede2cdb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/Yi-6b-dpo-v0.4/result_2023-12-31 07:40:09.json b/hyeogi/Yi-6b-dpo-v0.4/result_2023-12-31 07:40:09.json new file mode 100644 index 0000000000000000000000000000000000000000..94ec1215de4453174c3d51441abd0da7cdc7cb63 --- /dev/null +++ b/hyeogi/Yi-6b-dpo-v0.4/result_2023-12-31 07:40:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.01404910656495501, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650647 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39822744473212507, + "acc_stderr": 0.004885323175701674, + "acc_norm": 0.5345548695478988, + "acc_norm_stderr": 0.0049778511619043946 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5721583652618135, + "acc_stderr": 0.017692787927803728, + "acc_norm": 0.5721583652618135, + "acc_norm_stderr": 0.017692787927803728 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016337, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016337 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + 
"acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.025317649726448652, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.025317649726448652 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540632, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540632 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.03011821010694265, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.03011821010694265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 
0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6220183486238532, + "acc_stderr": 0.020789187066728117, + "acc_norm": 0.6220183486238532, + "acc_norm_stderr": 0.020789187066728117 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787317, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787317 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372435, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372435 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03004261583271486, + "acc_norm": 0.4264705882352941, + 
"acc_norm_stderr": 0.03004261583271486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.01200209166690231, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.01200209166690231 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3659730722154223, + "mc1_stderr": 0.01686294168408836, + "mc2": 0.5390386024707369, + "mc2_stderr": 0.015472303689441313 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5820543093270366, + "acc_stderr": 0.01695729200527971, + "acc_norm": 0.6245572609208973, + "acc_norm_stderr": 0.01664841158951108 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/Yi-6b-dpo-v0.4", + "model_sha": "8a267a46b55cedb026233d7f41db9dd97dca2c36", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/Yi-6b-v0.3/result_2023-12-08 04:28:25.json b/hyeogi/Yi-6b-v0.3/result_2023-12-08 04:28:25.json new file mode 100644 index 0000000000000000000000000000000000000000..cc0bbabcfcfa82aebb2e73e67e62d0f5cae356e0 --- /dev/null +++ b/hyeogi/Yi-6b-v0.3/result_2023-12-08 04:28:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3583617747440273, + "acc_stderr": 0.014012883334859854, + "acc_norm": 0.4283276450511945, + "acc_norm_stderr": 0.014460496367599027 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3972316271659032, + "acc_stderr": 0.004883246579496662, + "acc_norm": 0.5323640709022107, + "acc_norm_stderr": 0.004979317515432522 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5504469987228607, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.5504469987228607, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562793, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562793 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + 
"acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999936, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999936 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752173, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752173 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954953, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954953 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653333, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.0282863240755644, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.0282863240755644 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6458715596330276, + "acc_stderr": 0.020504729013829104, + "acc_norm": 0.6458715596330276, + "acc_norm_stderr": 0.020504729013829104 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.01984828016840116, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.01984828016840116 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + 
"acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842974, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842974 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.01532182168847619, + "mc2": 0.4048349906269079, + "mc2_stderr": 0.014771877117522413 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5690672963400236, + "acc_stderr": 0.017025558196043136, + "acc_norm": 0.602125147579693, + "acc_norm_stderr": 0.01682795905473339 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/Yi-6b-v0.3", + "model_sha": "754fd3466db3c4713f86ad61a0eabec2aeaa3c57", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/llama2-70b-v0.1/result_2023-12-05 09:36:11.json b/hyeogi/llama2-70b-v0.1/result_2023-12-05 09:36:11.json new file mode 100644 index 0000000000000000000000000000000000000000..2cc5396b8a96bd084ab85f71a99c25da656828bd --- /dev/null +++ b/hyeogi/llama2-70b-v0.1/result_2023-12-05 09:36:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.01397545412275656, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303022 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37442740489942244, + "acc_stderr": 0.004829856058603586, + "acc_norm": 0.4869547898824935, + "acc_norm_stderr": 0.004988082825213275 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5044699872286079, + "acc_stderr": 0.017879248970584353, + "acc_norm": 0.5044699872286079, + "acc_norm_stderr": 0.017879248970584353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.0424463323835323, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.0424463323835323 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.547085201793722, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.547085201793722, + "acc_norm_stderr": 0.03340867501923324 + 
}, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.034961309720561266, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.034961309720561266 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087764, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087764 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.030351527323344948, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.039439666991836285, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.039439666991836285 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.024419234966819067, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.024419234966819067 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.02689029788130311, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.02689029788130311 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.02775653525734767, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.02775653525734767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.02835895631342354, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.02835895631342354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829163, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829163 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650154, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650154 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261452, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261452 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824866, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824866 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36310299869621904, + "acc_stderr": 0.012282264406018758, + "acc_norm": 0.36310299869621904, + "acc_norm_stderr": 0.012282264406018758 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.03471157907953426, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.03471157907953426 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087307, + "mc2": 0.44642268659301343, + "mc2_stderr": 0.015149376929354377 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4309327036599764, + "acc_stderr": 0.017025558196043136, + "acc_norm": 0.4805194805194805, + "acc_norm_stderr": 0.017177301992342558 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/llama2-70b-v0.1", + "model_sha": "0b83d3a9260e4adfd644b52d593e8a93d6698aa0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/open-llama2-7b-dpo-v0.1/result_2023-12-16 20:16:49.json b/hyeogi/open-llama2-7b-dpo-v0.1/result_2023-12-16 20:16:49.json new file mode 100644 index 0000000000000000000000000000000000000000..93f1146eaadc395e13bcd2fc1f82b04add24d339 --- /dev/null +++ b/hyeogi/open-llama2-7b-dpo-v0.1/result_2023-12-16 20:16:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31143344709897613, + "acc_stderr": 0.013532472099850947, + "acc_norm": 0.3856655290102389, + "acc_norm_stderr": 0.01422425097325718 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3815972913762199, + "acc_stderr": 0.004847857546957469, + "acc_norm": 0.4929296952798247, + "acc_norm_stderr": 0.004989282516055395 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3269476372924649, + "acc_stderr": 0.016774908180131467, + "acc_norm": 0.3269476372924649, + "acc_norm_stderr": 0.016774908180131467 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.025403832978179604, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.025403832978179604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.04142313771996664, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.04142313771996664 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270285, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270285 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.21379310344827587, + "acc_stderr": 0.03416520447747549, + "acc_norm": 0.21379310344827587, + "acc_norm_stderr": 0.03416520447747549 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.02788682807838057, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.02788682807838057 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2, + "acc_stderr": 0.020280805062535722, + "acc_norm": 0.2, + "acc_norm_stderr": 0.020280805062535722 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.047128212574267705, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.047128212574267705 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678243, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678243 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594528, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594528 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.33760683760683763, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.33760683760683763, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.02564410863926762, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.02564410863926762 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 
0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.030299574664788147, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.030299574664788147 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.02218203720294837, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.02218203720294837 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.024257901705323374, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.024257901705323374 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292405, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292405 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294674, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294674 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26972477064220185, + "acc_stderr": 0.01902848671111544, + "acc_norm": 0.26972477064220185, + "acc_norm_stderr": 0.01902848671111544 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312337, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312337 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.0178831881346672, + "acc_norm": 0.26633986928104575, + 
"acc_norm_stderr": 0.0178831881346672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460983, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460983 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.027467401804058014, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.027467401804058014 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.01493131670322051, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.01493131670322051 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403325, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403325 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.027257202606114948, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.027257202606114948 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.028263889943784606, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.028263889943784606 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3206751054852321, + "acc_stderr": 0.03038193194999041, + "acc_norm": 0.3206751054852321, + "acc_norm_stderr": 0.03038193194999041 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113893, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113893 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693247, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693247 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511784, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511784 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33414932680538556, + "mc1_stderr": 0.016512530677150517, + "mc2": 0.5147595886551624, + "mc2_stderr": 0.015288691882001125 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30342384887839435, + "acc_stderr": 0.01580607271790957, + "acc_norm": 0.4002361275088548, + "acc_norm_stderr": 0.01684469351050505 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/open-llama2-7b-dpo-v0.1", + "model_sha": "54f59971e5f2a15c7cd8baff05b20e1c469283f9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/open-llama2-7b-v0.1/result_2023-12-15 23:35:02.json b/hyeogi/open-llama2-7b-v0.1/result_2023-12-15 23:35:02.json new file mode 100644 index 0000000000000000000000000000000000000000..4de78176351df0f6d458d9587ee57c32b1756cb4 --- /dev/null +++ b/hyeogi/open-llama2-7b-v0.1/result_2023-12-15 23:35:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3242320819112628, + "acc_stderr": 0.013678810399518822, + "acc_norm": 0.3967576791808874, + "acc_norm_stderr": 0.014296513020180644 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3833897629954192, + "acc_stderr": 0.00485218262127426, + "acc_norm": 0.5002987452698665, + "acc_norm_stderr": 0.004989780520782245 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245231, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245231 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3116219667943806, + "acc_stderr": 0.016562433867284176, + "acc_norm": 0.3116219667943806, + "acc_norm_stderr": 0.016562433867284176 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 
0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542124, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542124 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071855, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071855 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188957, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188957 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.03318833286217282, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.03318833286217282 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270286 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380572, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380572 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.021606294494647727, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.021606294494647727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.29354838709677417, + "acc_stderr": 0.0259060870213193, + "acc_norm": 0.29354838709677417, + "acc_norm_stderr": 0.0259060870213193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32905982905982906, + "acc_stderr": 0.03078232157768816, + "acc_norm": 0.32905982905982906, + "acc_norm_stderr": 0.03078232157768816 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.28679245283018867, + "acc_stderr": 0.027834912527544074, + "acc_norm": 0.28679245283018867, + "acc_norm_stderr": 
0.027834912527544074 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302506, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302506 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173042, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173042 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643895, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643895 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.025842248700902168, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.025842248700902168 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25321100917431194, + "acc_stderr": 0.018644073041375043, + "acc_norm": 0.25321100917431194, + "acc_norm_stderr": 0.018644073041375043 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145634 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.018718067052623216, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.018718067052623216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.024398192986654924, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.024398192986654924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484378, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.32489451476793246, + "acc_stderr": 0.030486039389105313, + "acc_norm": 0.32489451476793246, + "acc_norm_stderr": 0.030486039389105313 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.01098630787004552, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.01098630787004552 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.014948812679062137, + "mc2": 0.4045967942290401, + "mc2_stderr": 0.014756891632525398 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29988193624557263, + "acc_stderr": 0.01575344761542946, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.01705775370216028 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/open-llama2-7b-v0.1", + "model_sha": "0f2714d91a830c5a89ba9f54ed4cc8ba147fafb1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyunseoki/ko-en-llama2-13b/result_2023-10-02 15:05:47.json b/hyunseoki/ko-en-llama2-13b/result_2023-10-02 15:05:47.json new file mode 100644 index 0000000000000000000000000000000000000000..b86f5e56b299d60e1c277a5726a69607e0633354 --- /dev/null +++ b/hyunseoki/ko-en-llama2-13b/result_2023-10-02 15:05:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.01407722310847014, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.014430197069326021 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4063931487751444, + "acc_stderr": 0.004901558132335531, + "acc_norm": 0.5423222465644294, + "acc_norm_stderr": 0.004971874159777693 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370607, + 
"acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.017869330154003705, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.017869330154003705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236784, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236784 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.02483881198803315, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02483881198803315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.028071588901091852, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.028071588901091852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145647, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145647 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142262, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.018550634502952964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.018550634502952964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.02667925227010311, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.02667925227010311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3005215123859192, + "acc_stderr": 0.011709918883039119, + "acc_norm": 0.3005215123859192, + "acc_norm_stderr": 0.011709918883039119 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.40740955216969593, + "mc2_stderr": 0.01489940591651966 + }, + "harness|ko_commongen_v2|2": { + "acc": 
0.4557260920897285, + "acc_stderr": 0.017122829143292648, + "acc_norm": 0.5194805194805194, + "acc_norm_stderr": 0.017177301992342544 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyunseoki/ko-en-llama2-13b", + "model_sha": "2768cf6f955b65868ccbb20658e2cc444b2f3be9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyunseoki/ko-ref-llama2-13b/result_2023-10-04 08:17:01.json b/hyunseoki/ko-ref-llama2-13b/result_2023-10-04 08:17:01.json new file mode 100644 index 0000000000000000000000000000000000000000..f35699e65cd1bb3a181457205a6a21bd1e03570e --- /dev/null +++ b/hyunseoki/ko-ref-llama2-13b/result_2023-10-04 08:17:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.37457337883959047, + "acc_stderr": 0.014144193471893456, + "acc_norm": 0.43600682593856654, + "acc_norm_stderr": 0.014491225699230916 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3990240987851026, + "acc_stderr": 0.004886969266944274, + "acc_norm": 0.5257916749651463, + "acc_norm_stderr": 0.00498313847960438 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260595, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260595 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.40229885057471265, + "acc_stderr": 0.01753529452906895, + "acc_norm": 0.40229885057471265, + "acc_norm_stderr": 0.01753529452906895 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231008, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231008 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553026, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553026 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648022, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648022 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378948, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378948 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.029597329730978103, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.029597329730978103 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132354, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132354 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, 
+ "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.046166311118017125, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.046166311118017125 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642749, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642749 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.41452991452991456, + "acc_stderr": 0.03227396567623778, + "acc_norm": 0.41452991452991456, + "acc_norm_stderr": 0.03227396567623778 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695238, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04265792110940588, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940588 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.025722802200895803, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.025722802200895803 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.026041766202717163, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.026041766202717163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.2694300518134715, + 
"acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.03712454853721368, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.03712454853721368 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.326605504587156, + "acc_stderr": 0.020106990889937306, + "acc_norm": 0.326605504587156, + "acc_norm_stderr": 0.020106990889937306 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.11904761904761904, + "acc_stderr": 0.028965535858562975, + "acc_norm": 0.11904761904761904, + "acc_norm_stderr": 0.028965535858562975 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046626, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046626 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4380165289256198, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998905, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29248366013071897, + "acc_stderr": 0.018403415710109783, + "acc_norm": 0.29248366013071897, + "acc_norm_stderr": 0.018403415710109783 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.028765111718046937, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.028765111718046937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2897959183673469, + "acc_stderr": 0.02904308868330433, + "acc_norm": 0.2897959183673469, + "acc_norm_stderr": 0.02904308868330433 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3881856540084388, + "acc_stderr": 0.03172295004332329, + "acc_norm": 0.3881856540084388, + "acc_norm_stderr": 0.03172295004332329 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2985658409387223, + "acc_stderr": 0.011688060141794228, + "acc_norm": 0.2985658409387223, + "acc_norm_stderr": 0.011688060141794228 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03308611113236436, + "acc_norm": 
0.3333333333333333, + "acc_norm_stderr": 0.03308611113236436 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.037818873532059816, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.037818873532059816 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707679, + "mc2": 0.4089327594647445, + "mc2_stderr": 0.01512159542972759 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.34710743801652894, + "acc_stderr": 0.016366945603281276, + "acc_norm": 0.4498229043683589, + "acc_norm_stderr": 0.017103573343825715 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyunseoki/ko-ref-llama2-13b", + "model_sha": "c5d09631c88ab5012b48187ecd90ae773cd4bbd9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 
1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyunseoki/ko-ref-llama2-7b/result_2023-10-04 08:18:22.json b/hyunseoki/ko-ref-llama2-7b/result_2023-10-04 08:18:22.json new file mode 100644 index 0000000000000000000000000000000000000000..4dab19767e32f21b991084a1cfef8b87fce4533d --- /dev/null +++ b/hyunseoki/ko-ref-llama2-7b/result_2023-10-04 08:18:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33447098976109213, + "acc_stderr": 0.013787460322441387, + "acc_norm": 0.3848122866894198, + "acc_norm_stderr": 0.0142183710652511 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3836885082652858, + "acc_stderr": 0.0048528966817367606, + "acc_norm": 0.4970125473013344, + "acc_norm_stderr": 0.004989692344313999 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.31417624521072796, + "acc_stderr": 0.01659929173588491, + "acc_norm": 0.31417624521072796, + "acc_norm_stderr": 0.01659929173588491 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370519, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370519 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818784, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818784 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836554, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836554 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993179, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993179 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.030176808288974337, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 
0.030176808288974337 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.02136202772522273, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.02136202772522273 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.02652270967466777, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.02652270967466777 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.031937057262002924, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.031937057262002924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.02560423347089909, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.02560423347089909 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31840796019900497, + "acc_stderr": 0.032941184790540944, + "acc_norm": 0.31840796019900497, + "acc_norm_stderr": 0.032941184790540944 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047876, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047876 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30346820809248554, + "acc_stderr": 0.02475241196091721, + "acc_norm": 0.30346820809248554, + "acc_norm_stderr": 0.02475241196091721 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.3117283950617284, + "acc_stderr": 0.025773111169630443, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.025773111169630443 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24403669724770644, + "acc_stderr": 0.018415286351416416, + "acc_norm": 0.24403669724770644, + "acc_norm_stderr": 0.018415286351416416 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818114, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818114 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.02649303322514589, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.02649303322514589 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.017776947157528037, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.017776947157528037 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953185, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16176470588235295, + "acc_stderr": 0.02236867256288675, + "acc_norm": 0.16176470588235295, + "acc_norm_stderr": 0.02236867256288675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2620599739243807, + "acc_stderr": 0.011231552795890394, + "acc_norm": 0.2620599739243807, + "acc_norm_stderr": 0.011231552795890394 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.3953129040998704, + "mc2_stderr": 0.015062425593708578 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29279811097992914, + "acc_stderr": 0.015644823205401337, + "acc_norm": 0.4037780401416765, + "acc_norm_stderr": 0.016869031540298632 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 
1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyunseoki/ko-ref-llama2-7b", + "model_sha": "1ee08c79ae7393473754b77e82b1472ef63d5dd2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ifuseok/yi-ko-playtus-instruct-v0.1/result_2023-12-20 04:49:59.json b/ifuseok/yi-ko-playtus-instruct-v0.1/result_2023-12-20 04:49:59.json new file mode 100644 index 0000000000000000000000000000000000000000..96b4b79966141d42d6f96a1e7310c0fc5324141e --- /dev/null +++ b/ifuseok/yi-ko-playtus-instruct-v0.1/result_2023-12-20 04:49:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3122866894197952, + "acc_stderr": 0.013542598541688065, + "acc_norm": 0.3703071672354949, + "acc_norm_stderr": 0.01411129875167495 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3737303326030671, + "acc_stderr": 0.004828045774734903, + "acc_norm": 0.4780920135431189, + "acc_norm_stderr": 0.00498498932064813 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44061302681992337, + "acc_stderr": 0.01775339697390849, + "acc_norm": 0.44061302681992337, + "acc_norm_stderr": 0.01775339697390849 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534422, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534422 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34080717488789236, + "acc_stderr": 0.03181149747055358, + "acc_norm": 0.34080717488789236, + "acc_norm_stderr": 0.03181149747055358 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.02737987122994325, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.02737987122994325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.032224140452411065, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.032224140452411065 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.02983280811479601, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.02983280811479601 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945266, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945266 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.03533133389323657, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.03533133389323657 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.02375292871211213, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.02375292871211213 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + 
"acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41329479768786126, + "acc_stderr": 0.02651126136940924, + "acc_norm": 0.41329479768786126, + "acc_norm_stderr": 0.02651126136940924 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.027272582849839792, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.027272582849839792 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288441, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288441 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.021410999753635914, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.021410999753635914 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891765, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891765 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35130718954248363, + "acc_stderr": 0.01931267606578656, + "acc_norm": 0.35130718954248363, + "acc_norm_stderr": 0.01931267606578656 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503803, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503803 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.01457265038340917, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.01457265038340917 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + 
"acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2977941176470588, + "acc_stderr": 0.027778298701545443, + "acc_norm": 0.2977941176470588, + "acc_norm_stderr": 0.027778298701545443 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3161668839634941, + "acc_stderr": 0.01187578089438658, + "acc_norm": 0.3161668839634941, + "acc_norm_stderr": 0.01187578089438658 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006504, + "mc2": 0.43992652953593975, + "mc2_stderr": 0.015968344564232163 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39669421487603307, + "acc_stderr": 0.016819438642971408, + "acc_norm": 0.44391971664698937, + "acc_norm_stderr": 0.017081884623542546 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, 
+ "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ifuseok/yi-ko-playtus-instruct-v0.1", + "model_sha": "a35b7d833cb87ca41d7380da9411df17e3c4052e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ifuseok/yi-ko-playtus-instruct-v0.2/result_2023-12-27 08:08:48.json b/ifuseok/yi-ko-playtus-instruct-v0.2/result_2023-12-27 08:08:48.json new file mode 100644 index 0000000000000000000000000000000000000000..b4d211fb92742e25c6623a0552db35410e43aeba --- /dev/null +++ b/ifuseok/yi-ko-playtus-instruct-v0.2/result_2023-12-27 08:08:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3430034129692833, + "acc_stderr": 0.013872423223718167, + "acc_norm": 0.41552901023890787, + "acc_norm_stderr": 0.014401366641216391 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3979286994622585, + "acc_stderr": 0.0048847024124560965, + "acc_norm": 0.5294761999601673, + "acc_norm_stderr": 0.004981103157940433 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5593869731800766, + "acc_stderr": 0.017753396973908486, + "acc_norm": 0.5593869731800766, + "acc_norm_stderr": 0.017753396973908486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611549, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611549 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 
0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126177, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126177 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.02843453315268184, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.02843453315268184 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.03011821010694266, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.03011821010694266 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + 
"acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.430635838150289, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.430635838150289, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.03825825548848608, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.03825825548848608 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.021109128133413913, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.021109128133413913 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208839, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208839 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.02850980780262656, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.02850980780262656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777473, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777473 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.01962744474841224, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.01962744474841224 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 
0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331161, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331161 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.011965311536571528, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.011965311536571528 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087302, + "mc2": 0.41178231606045385, + "mc2_stderr": 0.014882012342473607 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.017154073716682868, + "acc_norm": 0.602125147579693, + "acc_norm_stderr": 0.01682795905473339 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ifuseok/yi-ko-playtus-instruct-v0.2", + "model_sha": "2682d6473035aabe0a0fc7a2ab035659225a1d36", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/igig98/ppo2/result_2023-10-29 13:20:18.json b/igig98/ppo2/result_2023-10-29 13:20:18.json new file mode 100644 index 0000000000000000000000000000000000000000..0a13d7a8b69cce47a7d15f600c5edcde705ad20b --- /dev/null +++ b/igig98/ppo2/result_2023-10-29 13:20:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2909556313993174, + "acc_stderr": 0.013273077865907573, + "acc_norm": 0.3447098976109215, + "acc_norm_stderr": 0.013888816286782112 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3908583947420832, + "acc_stderr": 0.00486945515093382, + "acc_norm": 0.5073690499900418, + "acc_norm_stderr": 0.004989239462835228 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457923, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457923 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2554278416347382, + "acc_stderr": 0.015594955384455765, + "acc_norm": 0.2554278416347382, + "acc_norm_stderr": 0.015594955384455765 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18072289156626506, + "acc_stderr": 0.029955737855810138, + "acc_norm": 0.18072289156626506, + "acc_norm_stderr": 0.029955737855810138 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.31189710610932475, + "acc_stderr": 0.02631185807185416, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.02631185807185416 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.16143497757847533, + "acc_stderr": 0.02469395789912846, + "acc_norm": 0.16143497757847533, + "acc_norm_stderr": 0.02469395789912846 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.038073871163060866, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.038073871163060866 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786751, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786751 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003337, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003337 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.026653531596715477, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.026653531596715477 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2358974358974359, + "acc_stderr": 0.021525965407408726, + "acc_norm": 0.2358974358974359, + "acc_norm_stderr": 0.021525965407408726 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.024892469172462826, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.024892469172462826 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.027778835904935437, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.027778835904935437 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.0264803571798957, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.0264803571798957 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.03831305140884601, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03831305140884601 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959912, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959912 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.208955223880597, + "acc_stderr": 0.028748298931728658, + 
"acc_norm": 0.208955223880597, + "acc_norm_stderr": 0.028748298931728658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594295, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031715, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031715 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554858, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554858 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724148, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724148 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.025773111169630433, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.025773111169630433 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147602, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147602 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.024404394928087873, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.024404394928087873 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119667, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119667 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.018185218954318082, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.018185218954318082 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432407, + "acc_norm": 0.23404255319148937, + 
"acc_norm_stderr": 0.025257861359432407 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100998, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100998 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960234, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960234 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539264, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539264 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373618, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373618 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875833, + "mc2": 0.4229362575464193, + "mc2_stderr": 0.015023014923371594 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3022432113341204, + "acc_stderr": 0.015788654863022375, + "acc_norm": 0.3447461629279811, + "acc_norm_stderr": 0.016340649905418697 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "igig98/ppo2", + "model_sha": "12b21b4455bffbaea9811bddb74ceb8cb6cc5f8c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/iknow-lab/AULM-12.8b-v0/result_2023-10-14 16:04:08.json b/iknow-lab/AULM-12.8b-v0/result_2023-10-14 16:04:08.json new file mode 100644 index 0000000000000000000000000000000000000000..aef4fe6831e4031ca6caf81f34ba52fe9888152b --- /dev/null +++ b/iknow-lab/AULM-12.8b-v0/result_2023-10-14 16:04:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26706484641638223, + "acc_stderr": 0.012928933196496337, + "acc_norm": 0.3310580204778157, + "acc_norm_stderr": 0.013752062419817836 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37134037044413465, + "acc_stderr": 0.004821757734156723, + "acc_norm": 0.47470623381796456, + "acc_norm_stderr": 0.004983392650570962 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691584, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.03424042924691584 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.0153023801235421, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.0153023801235421 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066654, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066654 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.026148818018424513, + "acc_norm": 0.2, + "acc_norm_stderr": 0.026148818018424513 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.03329394119073529, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.03329394119073529 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.21864951768488747, + "acc_stderr": 0.02347558141786111, + "acc_norm": 0.21864951768488747, + "acc_norm_stderr": 0.02347558141786111 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229136, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229136 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533953, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533953 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233483, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233483 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24193548387096775, + "acc_stderr": 0.024362599693031093, + "acc_norm": 0.24193548387096775, + "acc_norm_stderr": 0.024362599693031093 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02934311479809445, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02934311479809445 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.026616482980501704, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.026616482980501704 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": 
{ + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766114, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21393034825870647, + "acc_stderr": 0.028996909693328927, + "acc_norm": 0.21393034825870647, + "acc_norm_stderr": 0.028996909693328927 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.03063114553919882, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.03063114553919882 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.021679219663693138, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.021679219663693138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.023618678310069363, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.023618678310069363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.02465968518596729, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.02465968518596729 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.029519282616817244, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.029519282616817244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25688073394495414, + "acc_stderr": 0.01873249292834247, + "acc_norm": 0.25688073394495414, + "acc_norm_stderr": 0.01873249292834247 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.02495418432487991, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.02495418432487991 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 
0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.017479487001364764, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.017479487001364764 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953777, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953777 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19907407407407407, + "acc_stderr": 0.027232298462690218, + "acc_norm": 0.19907407407407407, + "acc_norm_stderr": 0.027232298462690218 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409162, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409162 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254184, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254184 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866767, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866767 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25097783572359844, + "acc_stderr": 0.011073730299187234, + "acc_norm": 0.25097783572359844, + "acc_norm_stderr": 0.011073730299187234 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.031145570659486782, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.031145570659486782 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624335, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624335 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4336773026110262, + "mc2_stderr": 0.01517918566270363 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29515938606847697, + "acc_stderr": 0.015681535229192186, + "acc_norm": 0.3659976387249115, + "acc_norm_stderr": 0.016561489664895696 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "iknow-lab/AULM-12.8b-v0", + "model_sha": "daeca40346ba44b1fbb6939cc635adf467fa6cab", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ingeol/ppo_test/result_2023-10-16 23:46:09.json b/ingeol/ppo_test/result_2023-10-16 23:46:09.json new file mode 100644 index 0000000000000000000000000000000000000000..20db94820abbd896657d47bbd439814cc1da115e --- /dev/null +++ b/ingeol/ppo_test/result_2023-10-16 23:46:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29266211604095566, + "acc_stderr": 0.013295916103619404, + "acc_norm": 0.3438566552901024, + "acc_norm_stderr": 0.013880644570156213 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39026090420235016, + "acc_stderr": 0.004868117598481941, + "acc_norm": 0.5064728141804421, + "acc_norm_stderr": 0.00498936327695524 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824564, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824564 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25287356321839083, + "acc_stderr": 0.015543377313719681, + 
"acc_norm": 0.25287356321839083, + "acc_norm_stderr": 0.015543377313719681 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.026355158413349407, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.026355158413349407 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18072289156626506, + "acc_stderr": 0.02995573785581014, + "acc_norm": 0.18072289156626506, + "acc_norm_stderr": 0.02995573785581014 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.026664410886937613, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.026664410886937613 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.16591928251121077, + "acc_stderr": 0.024967553196547157, + "acc_norm": 0.16591928251121077, + "acc_norm_stderr": 0.024967553196547157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713549, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713549 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882378, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.02127839386358628, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.02127839386358628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617732, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617732 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.025284416114900156, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.025284416114900156 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 
0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.027134291628741695, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.027134291628741695 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.19090909090909092, + "acc_stderr": 0.03764425585984927, + "acc_norm": 0.19090909090909092, + "acc_norm_stderr": 0.03764425585984927 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21890547263681592, + "acc_stderr": 0.029239174636647, + "acc_norm": 0.21890547263681592, + "acc_norm_stderr": 0.029239174636647 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184766, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184766 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.023357365785874037, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.023357365785874037 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724148, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724148 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.02570264026060376, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.02570264026060376 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147602, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147602 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.0181256691808615, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.0181256691808615 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119667, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119667 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266736, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266736 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402545, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402545 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2659217877094972, + "acc_stderr": 0.014776765066438895, + "acc_norm": 0.2659217877094972, + "acc_norm_stderr": 0.014776765066438895 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396563, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396563 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27835723598435463, + "acc_stderr": 0.011446990197380982, + "acc_norm": 0.27835723598435463, + "acc_norm_stderr": 0.011446990197380982 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083292, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083292 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842883, + "mc2": 0.4208363898748992, + "mc2_stderr": 0.014946599322770709 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30578512396694213, + "acc_stderr": 0.0158405389325341, + "acc_norm": 0.36481700118063753, + "acc_norm_stderr": 0.016550144337046595 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ingeol/ppo_test", + "model_sha": "ec1c89b180c1eb383c5a348b4d113733c3e8e238", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ingeol/ppo_test/result_2023-10-16 23:46:16.json b/ingeol/ppo_test/result_2023-10-16 23:46:16.json new file mode 100644 index 0000000000000000000000000000000000000000..20db94820abbd896657d47bbd439814cc1da115e --- /dev/null +++ b/ingeol/ppo_test/result_2023-10-16 23:46:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29266211604095566, + "acc_stderr": 0.013295916103619404, + "acc_norm": 0.3438566552901024, + "acc_norm_stderr": 0.013880644570156213 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39026090420235016, + "acc_stderr": 0.004868117598481941, + "acc_norm": 0.5064728141804421, + "acc_norm_stderr": 0.00498936327695524 + }, + "harness|ko_mmlu_world_religions|5": { 
+ "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824564, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824564 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25287356321839083, + "acc_stderr": 0.015543377313719681, + "acc_norm": 0.25287356321839083, + "acc_norm_stderr": 0.015543377313719681 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.026355158413349407, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.026355158413349407 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18072289156626506, + "acc_stderr": 0.02995573785581014, + "acc_norm": 0.18072289156626506, + "acc_norm_stderr": 0.02995573785581014 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.026664410886937613, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.026664410886937613 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.16591928251121077, + "acc_stderr": 0.024967553196547157, + "acc_norm": 0.16591928251121077, + "acc_norm_stderr": 0.024967553196547157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713549, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713549 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882378, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.02127839386358628, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.02127839386358628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 
0.030315099285617732, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617732 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.025284416114900156, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.025284416114900156 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.027134291628741695, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.027134291628741695 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.19090909090909092, + "acc_stderr": 0.03764425585984927, + "acc_norm": 0.19090909090909092, + "acc_norm_stderr": 0.03764425585984927 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21890547263681592, + "acc_stderr": 0.029239174636647, + "acc_norm": 0.21890547263681592, + "acc_norm_stderr": 0.029239174636647 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184766, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184766 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.023357365785874037, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.023357365785874037 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724148, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724148 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.02570264026060376, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.02570264026060376 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147602, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147602 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.0181256691808615, + "acc_norm": 
0.23302752293577983, + "acc_norm_stderr": 0.0181256691808615 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119667, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119667 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266736, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266736 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402545, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402545 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2659217877094972, + "acc_stderr": 0.014776765066438895, + "acc_norm": 0.2659217877094972, + "acc_norm_stderr": 0.014776765066438895 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396563, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396563 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27835723598435463, + "acc_stderr": 0.011446990197380982, + "acc_norm": 0.27835723598435463, + "acc_norm_stderr": 0.011446990197380982 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083292, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083292 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842883, + 
"mc2": 0.4208363898748992, + "mc2_stderr": 0.014946599322770709 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30578512396694213, + "acc_stderr": 0.0158405389325341, + "acc_norm": 0.36481700118063753, + "acc_norm_stderr": 0.016550144337046595 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ingeol/ppo_test", + "model_sha": "ec1c89b180c1eb383c5a348b4d113733c3e8e238", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ingeol/sft_merged/result_2023-10-15 17:37:34.json b/ingeol/sft_merged/result_2023-10-15 17:37:34.json new file mode 100644 index 0000000000000000000000000000000000000000..378acc6761a5ebbb338fb8a0ef7741ac9ee1c0a1 --- /dev/null +++ b/ingeol/sft_merged/result_2023-10-15 17:37:34.json @@ 
-0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2977815699658703, + "acc_stderr": 0.01336308010724449, + "acc_norm": 0.3395904436860068, + "acc_norm_stderr": 0.013839039762820167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39055964947221666, + "acc_stderr": 0.004868787333436579, + "acc_norm": 0.5038836885082653, + "acc_norm_stderr": 0.004989630887066195 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393161, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393161 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2515964240102171, + "acc_stderr": 0.015517322365529631, + "acc_norm": 0.2515964240102171, + "acc_norm_stderr": 0.015517322365529631 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2170212765957447, + "acc_stderr": 0.026947483121496245, + "acc_norm": 0.2170212765957447, + "acc_norm_stderr": 0.026947483121496245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21084337349397592, + "acc_stderr": 0.0317555478662992, + "acc_norm": 0.21084337349397592, + "acc_norm_stderr": 0.0317555478662992 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3215434083601286, + "acc_stderr": 0.026527724079528872, + "acc_norm": 0.3215434083601286, + "acc_norm_stderr": 0.026527724079528872 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.15695067264573992, + "acc_stderr": 0.02441358717490739, + "acc_norm": 0.15695067264573992, + "acc_norm_stderr": 0.02441358717490739 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969174, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969174 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713549, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713549 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438015, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438015 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868966, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868966 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.02127839386358628, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.02127839386358628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293752, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.02468597928623997, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.02468597928623997 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24786324786324787, + "acc_stderr": 0.028286324075564386, + "acc_norm": 0.24786324786324787, + "acc_norm_stderr": 0.028286324075564386 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.026616482980501715, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.026616482980501715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360383, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360383 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21890547263681592, + "acc_stderr": 0.029239174636647, + "acc_norm": 0.21890547263681592, + "acc_norm_stderr": 0.029239174636647 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047873, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047873 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184766, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184766 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071138, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071138 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724148, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724148 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.02570264026060376, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.02570264026060376 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + 
"acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.018175110510343588, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.018175110510343588 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879341016, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.024170840879341016 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03782728980865469, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03782728980865469 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.018185218954318082, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.018185218954318082 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432407, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432407 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298804, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369922, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369922 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898445, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142783, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539265, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539265 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087298, + "mc2": 0.4211117529867161, + "mc2_stderr": 0.014959536407311791 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31286894923258557, + "acc_stderr": 0.015941010118302658, + "acc_norm": 0.3754427390791027, + "acc_norm_stderr": 0.016648411589511098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ingeol/sft_merged", + "model_sha": 
"a958e5054c1935e86f418c797825ebccb9e7fd89", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ingeol/sft_merged_660/result_2023-10-15 23:44:57.json b/ingeol/sft_merged_660/result_2023-10-15 23:44:57.json new file mode 100644 index 0000000000000000000000000000000000000000..8b41a5d3aabbaca36068da0b0bac7cc85fd85c71 --- /dev/null +++ b/ingeol/sft_merged_660/result_2023-10-15 23:44:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537364, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785564 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3910575582553276, + "acc_stderr": 0.00486989929773455, + "acc_norm": 0.5030870344552878, + "acc_norm_stderr": 0.004989686307484551 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.03401052620104088, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.03401052620104088 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23371647509578544, + "acc_stderr": 0.015133383278988832, + "acc_norm": 0.23371647509578544, + "acc_norm_stderr": 0.015133383278988832 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.02818544130123409, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.02818544130123409 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3215434083601286, + "acc_stderr": 0.026527724079528872, + "acc_norm": 0.3215434083601286, + "acc_norm_stderr": 0.026527724079528872 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.15695067264573992, + "acc_stderr": 0.024413587174907405, + "acc_norm": 0.15695067264573992, + "acc_norm_stderr": 0.024413587174907405 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836555, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.037800192304380156, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.037800192304380156 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24102564102564103, + "acc_stderr": 0.021685546665333188, + "acc_norm": 0.24102564102564103, + "acc_norm_stderr": 0.021685546665333188 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239973, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239973 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24786324786324787, + "acc_stderr": 0.028286324075564393, + "acc_norm": 0.24786324786324787, + "acc_norm_stderr": 0.028286324075564393 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.02674989977124124, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.02674989977124124 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.038950910157241364, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.038950910157241364 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.029929415408348384, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.029929415408348384 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047873, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047873 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2275132275132275, + "acc_stderr": 0.021591269407823778, + "acc_norm": 0.2275132275132275, + "acc_norm_stderr": 0.021591269407823778 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071145, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071145 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02584224870090218, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02584224870090218 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26788990825688075, + "acc_stderr": 0.018987462257978652, + "acc_norm": 0.26788990825688075, + "acc_norm_stderr": 0.018987462257978652 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818115, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818115 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023805186524888142, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023805186524888142 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537766, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537766 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.02981263070156974, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.02981263070156974 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, 
+ "acc_stderr": 0.02812342933514278, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.02812342933514278 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.011328734403140332, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.011328734403140332 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608753, + "mc2": 0.42256277632208605, + "mc2_stderr": 0.014988663316140667 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30342384887839435, + "acc_stderr": 0.015806072717909573, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.016756921571069415 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ingeol/sft_merged_660", + "model_sha": "2426d1b6f2940a808b68c578e0fafdab1a515707", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.10/result_2023-12-02 00:04:03.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.10/result_2023-12-02 00:04:03.json new file mode 100644 index 0000000000000000000000000000000000000000..c6eda4afd416e7b0f9fe837e8ef61e72e6fdeae7 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.10/result_2023-12-02 00:04:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3890784982935154, + "acc_stderr": 0.014247309976045607, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42541326428998205, + "acc_stderr": 0.0049339509533808945, + "acc_norm": 0.5722963553077076, + "acc_norm_stderr": 0.004937345081868089 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5146871008939975, + "acc_stderr": 0.017872248024429122, + "acc_norm": 0.5146871008939975, + "acc_norm_stderr": 0.017872248024429122 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085328, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085328 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03156663099215416, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03156663099215416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767766, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767766 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969567, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.03000048544867599, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.03000048544867599 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048488, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.02339382650048488 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": 
{ + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637793, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637793 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818115, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818115 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891776, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891776 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.019450768432505518, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.019450768432505518 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005344, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005344 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 
0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.011773980329380708, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.011773980329380708 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.034602283272391704, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.034602283272391704 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715008, + "mc2": 0.3934309385509067, + "mc2_stderr": 0.014616103785255416 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.01716818720142925, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.01707725413155622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 
1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.10", + "model_sha": "4108403445d56ccc6adb1f1c4e3d4a9e50f1e95f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.11/result_2023-12-02 00:04:29.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.11/result_2023-12-02 00:04:29.json new file mode 100644 index 0000000000000000000000000000000000000000..1db123263727b67f02fc97e408f5e1918adae8b5 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.11/result_2023-12-02 00:04:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3856655290102389, + "acc_stderr": 0.014224250973257184, + "acc_norm": 0.45733788395904434, + "acc_norm_stderr": 0.014558106543924065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4122684724158534, + "acc_stderr": 0.0049123700239130175, + "acc_norm": 0.5567616012746465, + "acc_norm_stderr": 0.004957524197900413 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5402298850574713, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.5402298850574713, + "acc_norm_stderr": 0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 
0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.033661244890514495, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.033661244890514495 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019413, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019413 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.0478200179138006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + 
"acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489359, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489359 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215923, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215923 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.0278079901413202, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.0278079901413202 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.031798763421768524, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.031798763421768524 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016643, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016643 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704715993, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704715993 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.424161773343599, + "mc2_stderr": 0.01477730596837744 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44037780401416765, + "acc_stderr": 0.01706769977431298, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.017052633559856065 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.11", + "model_sha": "11119fbc9382e06b75e210a028fc72307551a508", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.12/result_2023-12-02 00:04:34.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.12/result_2023-12-02 00:04:34.json new file mode 100644 index 0000000000000000000000000000000000000000..2968aa7e74af7f7cb6d912cdef150d641f6c51b6 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.12/result_2023-12-02 00:04:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38822525597269625, + "acc_stderr": 0.01424161420741405, + "acc_norm": 0.4539249146757679, + "acc_norm_stderr": 0.01454922110517187 + }, + "harness|ko_hellaswag|10": { + "acc": 0.416849233220474, + "acc_stderr": 0.004920298437884909, + "acc_norm": 0.5608444532961562, + "acc_norm_stderr": 0.004952698802275644 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.017867695938429774, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.017867695938429774 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + 
"acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.040287315329475604, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.040287315329475604 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236153, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236153 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.0305032920133426, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.0305032920133426 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 
0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360385, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360385 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918407, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918407 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4653179190751445, + "acc_stderr": 0.026854257928258882, + "acc_norm": 0.4653179190751445, + "acc_norm_stderr": 0.026854257928258882 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5229357798165137, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.5229357798165137, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928724, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928724 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290313, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.027870745278290313 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.0387813988879761, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.0387813988879761 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215927, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215927 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534792, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534792 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190714, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3194263363754889, + "acc_stderr": 0.011908357176756153, + "acc_norm": 0.3194263363754889, + "acc_norm_stderr": 0.011908357176756153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.01557284045287583, + "mc2": 0.4373212814432078, + "mc2_stderr": 0.014894808317447994 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45218417945690675, + "acc_stderr": 0.017111567130916785, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972202 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.12", + "model_sha": "98b594a5b23d281b5d562d9bca39cdb7bbcd5bed", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.13/result_2023-12-02 00:09:38.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.13/result_2023-12-02 00:09:38.json new file mode 100644 index 0000000000000000000000000000000000000000..6c9d85bea389fcc456a48bd46f3cfd87ce199f36 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.13/result_2023-12-02 00:09:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40187713310580203, + "acc_stderr": 0.014327268614578278, + "acc_norm": 0.45733788395904434, + "acc_norm_stderr": 0.014558106543924058 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42491535550687115, + "acc_stderr": 0.004933198776700267, + "acc_norm": 0.5699063931487751, + "acc_norm_stderr": 0.004940771559475496 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5351213282247765, + "acc_stderr": 0.017835798806290642, + "acc_norm": 
0.5351213282247765, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.02498535492310231, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.02498535492310231 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.03125610824421881, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.03125610824421881 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275798, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275798 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.03528131472933607, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.03528131472933607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5376146788990825, + "acc_stderr": 0.021376575274397576, + "acc_norm": 0.5376146788990825, + "acc_norm_stderr": 0.021376575274397576 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.02799672318063146, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.02799672318063146 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 
0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412243, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412243 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826828, + "mc2": 0.4144614046981997, + "mc2_stderr": 0.014804144568912091 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.017186028469489283, + "acc_norm": 0.5525383707201889, + "acc_norm_stderr": 0.01709519030150058 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.13", + "model_sha": "957a247ca02f1ff29ed75871a13f4f46f9672b62", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.21/result_2023-12-19 01:54:11.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.21/result_2023-12-19 01:54:11.json new file mode 100644 index 0000000000000000000000000000000000000000..79e286d627305029d80486cf3680b452cd97c044 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.21/result_2023-12-19 01:54:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40273037542662116, + "acc_stderr": 0.014332236306790152, + "acc_norm": 0.4684300341296928, + "acc_norm_stderr": 0.01458223646086698 + }, + "harness|ko_hellaswag|10": { + "acc": 0.420035849432384, + "acc_stderr": 0.004925556104679419, + "acc_norm": 0.5699063931487751, + "acc_norm_stderr": 
0.0049407715594755 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.049486373240266356, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.049486373240266356 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5389527458492975, + "acc_stderr": 0.017825621793239006, + "acc_norm": 0.5389527458492975, + "acc_norm_stderr": 0.017825621793239006 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849734, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377906, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.0358701498607566 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 
0.021387863350353985, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353985 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577454, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577454 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560534, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.333116036505867, + "acc_stderr": 0.01203793045151205, + "acc_norm": 0.333116036505867, + "acc_norm_stderr": 0.01203793045151205 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + 
"mc1_stderr": 0.015250117079156472, + "mc2": 0.40602040127616007, + "mc2_stderr": 0.014779225998703538 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45336481700118064, + "acc_stderr": 0.01711541822522687, + "acc_norm": 0.5596221959858324, + "acc_norm_stderr": 0.017067699774312984 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.21", + "model_sha": "0f7564ff085f7eedbefdd13b0e390b0eca29f9d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.22/result_2023-12-18 04:57:44.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.22/result_2023-12-18 04:57:44.json new file mode 100644 index 
0000000000000000000000000000000000000000..479409ec3b86eccc35b2e444a75be70f4b43db04 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.22/result_2023-12-18 04:57:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.014441889627464396, + "acc_norm": 0.48890784982935154, + "acc_norm_stderr": 0.014607794914013041 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44722166899024096, + "acc_stderr": 0.00496190494917139, + "acc_norm": 0.5941047600079665, + "acc_norm_stderr": 0.004900608529778609 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5402298850574713, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.5402298850574713, + "acc_norm_stderr": 0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.031489558297455304, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.031489558297455304 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828065, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828065 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836928, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230193, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230193 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655812, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655812 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 
0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.021233365030319563, + "acc_norm": 0.5688073394495413, + "acc_norm_stderr": 0.021233365030319563 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829163, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829163 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952688, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952688 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03324708911809117, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.03324708911809117 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.01457265038340916, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.01457265038340916 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254167, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254167 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.03191282052669278, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.03191282052669278 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301854, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301854 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3494132985658409, + "acc_stderr": 
0.012177306252786683, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786683 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33414932680538556, + "mc1_stderr": 0.016512530677150517, + "mc2": 0.5007120389559494, + "mc2_stderr": 0.015469844656182272 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46871310507674147, + "acc_stderr": 0.01715666685978546, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.0171427361176433 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + 
"config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.22", + "model_sha": "d097a2107108c56e1e64d56df2650ad1005f15a6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.23/result_2023-12-18 06:20:23.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.23/result_2023-12-18 06:20:23.json new file mode 100644 index 0000000000000000000000000000000000000000..d1d51d3ac72ef9adaec04c52fcd828a23545ec50 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.23/result_2023-12-18 06:20:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41467576791808874, + "acc_stderr": 0.014397070564409172, + "acc_norm": 0.4735494880546075, + "acc_norm_stderr": 0.014590931358120163 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44503087034455285, + "acc_stderr": 0.004959535443170612, + "acc_norm": 0.6004779924317865, + "acc_norm_stderr": 0.004887991225950278 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5632183908045977, + "acc_stderr": 0.01773647083780069, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.01773647083780069 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806231, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806231 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617748, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617748 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650776, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650776 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009794, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009794 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230182, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230182 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5578034682080925, + 
"acc_stderr": 0.0267386036438074, + "acc_norm": 0.5578034682080925, + "acc_norm_stderr": 0.0267386036438074 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.03919415545048411, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.03919415545048411 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5246913580246914, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.5246913580246914, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.02116242004827351, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.02116242004827351 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.01992211568278667, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.01992211568278667 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 
0.028501452860396587, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713671, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713671 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702358, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702358 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.012238615750316496, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.012238615750316496 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.47413164678266917, + "mc2_stderr": 0.015081026411493382 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.01718832921965428, + "acc_norm": 0.5584415584415584, + "acc_norm_stderr": 0.017072525875563106 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.23", + "model_sha": "d67bdf521bd71da0aa1c5070c4f0dc89e988344c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.24/result_2023-12-18 23:59:33.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.24/result_2023-12-18 23:59:33.json new file mode 100644 index 0000000000000000000000000000000000000000..4fe52bc529cdbf84585734f1bc8f1dce5be1851a --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.24/result_2023-12-18 23:59:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.014441889627464396, + "acc_norm": 0.4880546075085324, + "acc_norm_stderr": 0.014607220340597171 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4508066122286397, + "acc_stderr": 0.00496557224680386, + "acc_norm": 0.6041625174268074, + "acc_norm_stderr": 0.004880303863138504 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5402298850574713, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.5402298850574713, + "acc_norm_stderr": 0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 
0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.02506909438729654, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.02506909438729654 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962956, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 
0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717861, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717861 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.039158572914369714, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.039158572914369714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.0358701498607566 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.021233365030319563, + "acc_norm": 0.5688073394495413, + "acc_norm_stderr": 0.021233365030319563 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.028452639985088006, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.028452639985088006 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449845, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449845 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.2435754189944134, + "acc_stderr": 0.014355911964767867, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767867 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2977941176470588, + "acc_stderr": 0.027778298701545443, + "acc_norm": 0.2977941176470588, + "acc_norm_stderr": 0.027778298701545443 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.03058732629470236, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.03058732629470236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741515, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741515 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35006119951040393, + "mc1_stderr": 0.01669794942015103, + "mc2": 0.5095614732032704, + "mc2_stderr": 0.01563779443660761 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998567, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 
1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.24", + "model_sha": "e6d5f42930c2e1d2310474735a5358c546f767ce", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.25/result_2023-12-19 02:06:59.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.25/result_2023-12-19 02:06:59.json new file mode 100644 index 0000000000000000000000000000000000000000..0c6d57612f70907d66bb596a4be92fe71b50b708 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.25/result_2023-12-19 02:06:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41723549488054607, + "acc_stderr": 0.014409825518403079, + "acc_norm": 0.48464163822525597, + "acc_norm_stderr": 0.014604496129394915 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4494124676359291, + "acc_stderr": 0.004964177035221415, + "acc_norm": 0.6041625174268074, + "acc_norm_stderr": 0.004880303863138502 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + 
"acc": 0.5337620578778135, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638627, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638627 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.02506909438729654, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.02506909438729654 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568392, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568392 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138653, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 
0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.02663653974111609, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.02663653974111609 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602592, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777473, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777473 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750188, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750188 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639882, + "acc_norm": 0.3333333333333333, 
+ "acc_norm_stderr": 0.028121636040639882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824845, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824845 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713671, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713671 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03068582059661081, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03068582059661081 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3474576271186441, + "acc_stderr": 0.012161417729749806, + "acc_norm": 0.3474576271186441, + "acc_norm_stderr": 0.012161417729749806 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.03804913653971011, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.03804913653971011 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31701346389228885, + "mc1_stderr": 0.016289203374403392, + "mc2": 0.4867878475809202, + "mc2_stderr": 0.015343742215624163 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.25", + "model_sha": "677d2c24efb0f3d0568944dfde58795cbb21b16b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.3/result_2023-11-30 11:57:48.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.3/result_2023-11-30 11:57:48.json new file mode 100644 index 0000000000000000000000000000000000000000..3c4ae177764c7c1bfc44b01a88ebe0a716623f2a --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.3/result_2023-11-30 11:57:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3728668941979522, + "acc_stderr": 0.014131176760131174, + "acc_norm": 0.4462457337883959, + "acc_norm_stderr": 0.014526705548539982 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41794463254331804, + "acc_stderr": 0.004922129568919583, + "acc_norm": 0.5683130850428202, + "acc_norm_stderr": 0.004942990623131124 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.51213282247765, + "acc_stderr": 0.017874698667491338, + "acc_norm": 0.51213282247765, + "acc_norm_stderr": 0.017874698667491338 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 
0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.03355746535223264, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.043171711948702535, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.043171711948702535 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.025124653525885138, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.025124653525885138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.030197611600197953, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.030197611600197953 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 
0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.02719593480408563, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.02719593480408563 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159665, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159665 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068642, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068642 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4653179190751445, + "acc_stderr": 0.026854257928258875, + "acc_norm": 0.4653179190751445, + "acc_norm_stderr": 0.026854257928258875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.021436420955529428, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.021436420955529428 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424506, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424506 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + 
}, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.01939305840235544, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.01939305840235544 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053479, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053479 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.031912820526692774, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.031912820526692774 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.032335327775334835, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3161668839634941, + "acc_stderr": 0.011875780894386578, + "acc_norm": 0.3161668839634941, + "acc_norm_stderr": 0.011875780894386578 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": 0.3961332596017898, + "mc2_stderr": 0.01470715664536681 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4946871310507674, + "acc_stderr": 0.017189383627229687, + "acc_norm": 0.5596221959858324, + "acc_norm_stderr": 0.01706769977431297 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.3", + "model_sha": "edba15648cad18bb50c8f586e984742dfa7609e1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.4/result_2023-11-30 13:50:44.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.4/result_2023-11-30 13:50:44.json new file mode 100644 index 0000000000000000000000000000000000000000..49969c198b62a1e3ba9c8faf5b955ebcec08396d --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.4/result_2023-11-30 13:50:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.014212444980651889, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804243 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4217287392949612, + "acc_stderr": 0.004928263494616731, + "acc_norm": 0.5674168492332204, + "acc_norm_stderr": 0.00494421593702139 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 
0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49680715197956576, + "acc_stderr": 0.017879598945933068, + "acc_norm": 0.49680715197956576, + "acc_norm_stderr": 0.017879598945933068 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357773, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357773 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.03521224908841583, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.03521224908841583 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424385, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424385 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03156663099215416, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03156663099215416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.03295797566311271, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.03295797566311271 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + 
"acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5811965811965812, + "acc_stderr": 0.03232128912157792, + "acc_norm": 0.5811965811965812, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.03000048544867599, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.03000048544867599 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.03567603799639172, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.03567603799639172 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.026830805998952243, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.026830805998952243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.027339546640662737, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.027339546640662737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.03541508578884019, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 0.03541508578884019 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44770642201834865, + "acc_stderr": 0.021319754962425462, + "acc_norm": 0.44770642201834865, + "acc_norm_stderr": 0.021319754962425462 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.01933314202079706, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.01933314202079706 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983583, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983583 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.303129074315515, + "acc_stderr": 0.011738669951254296, + "acc_norm": 0.303129074315515, + "acc_norm_stderr": 0.011738669951254296 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.4115946865899359, + "mc2_stderr": 0.014692840096098678 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.512396694214876, + "acc_stderr": 0.01718506973267654, + "acc_norm": 0.577331759149941, + "acc_norm_stderr": 0.016983506079577607 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.4", + "model_sha": "b750a1bafd65119569927ea34d464a6c707a433a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.5/result_2023-11-30 16:16:06.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.5/result_2023-11-30 16:16:06.json new file mode 100644 index 0000000000000000000000000000000000000000..69a7aef17fcc9e1fef571de4beea3c0c3cfda087 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.5/result_2023-11-30 16:16:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39334470989761094, + "acc_stderr": 0.01427510146569302, + "acc_norm": 
0.4590443686006826, + "acc_norm_stderr": 0.01456229107360123 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43995220075682134, + "acc_stderr": 0.0049536670286543846, + "acc_norm": 0.5958972316271659, + "acc_norm_stderr": 0.004897146690596255 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.36893203883495146, + "acc_stderr": 0.04777615181156739, + "acc_norm": 0.36893203883495146, + "acc_norm_stderr": 0.04777615181156739 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4942528735632184, + "acc_stderr": 0.017878782326129234, + "acc_norm": 0.4942528735632184, + "acc_norm_stderr": 0.017878782326129234 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.04260735157644559, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.04260735157644559 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.031753678460966245, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3967741935483871, + "acc_stderr": 0.02783123160576794, + "acc_norm": 0.3967741935483871, + "acc_norm_stderr": 0.02783123160576794 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.03271298896811159, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.03271298896811159 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.030102793781791194, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.030102793781791194 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48258706467661694, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.48258706467661694, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068642, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068642 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.026483392042098177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005135, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.02141099975363592, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.02141099975363592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283693, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283693 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.019542101564854125, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.019542101564854125 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022135 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.030388051301678116, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.030388051301678116 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.031251275910891656, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.031251275910891656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28748370273794005, + "acc_stderr": 0.011559337355708509, + "acc_norm": 0.28748370273794005, + "acc_norm_stderr": 0.011559337355708509 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719129 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4538096352988952, + "mc2_stderr": 0.015290893328767008 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4344746162927981, + "acc_stderr": 0.017042098620824935, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.5", + "model_sha": "4f3438e97f69f93269a2f78e6678647d45dd0e47", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": 
null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.8/result_2023-12-01 15:29:09.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.8/result_2023-12-01 15:29:09.json new file mode 100644 index 0000000000000000000000000000000000000000..7445ba35d961d3baaad39aab9589d5e68e95303b --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.8/result_2023-12-01 15:29:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38054607508532423, + "acc_stderr": 0.014188277712349812, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955262 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4138617805218084, + "acc_stderr": 0.0049151774069562575, + "acc_norm": 0.5646285600477993, + "acc_norm_stderr": 0.00494792269268884 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5236270753512133, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.5236270753512133, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489425, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489425 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + 
"acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177505, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177505 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.033442837442804574, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.033442837442804574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.02804098138076155, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.02804098138076155 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048487, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.02339382650048487 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361823, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361823 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489358, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489358 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.021406952688151577, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.021406952688151577 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.019162418588623553, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.019162418588623553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.040598672469526864, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.040598672469526864 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312548, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312548 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.5443037974683544, + "acc_stderr": 0.03241920684693335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.03241920684693335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698607, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698607 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.034602283272391704, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.034602283272391704 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087297, + "mc2": 0.4221992902902898, + "mc2_stderr": 0.014789127497911234 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.016983506079577604, + "acc_norm": 0.5430932703659976, + "acc_norm_stderr": 0.017126389093086784 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.8", + "model_sha": "101fcd5b704a9994471805741f9da3f7f4959088", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.9/result_2023-12-01 15:26:25.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.9/result_2023-12-01 15:26:25.json new file mode 100644 index 0000000000000000000000000000000000000000..b07403e951fa83c14f77c31d8c2faddf33599838 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.9/result_2023-12-01 15:26:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.363481228668942, + "acc_stderr": 0.014056207319068287, + "acc_norm": 0.43430034129692835, + "acc_norm_stderr": 0.014484703048857359 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4124676359290978, + "acc_stderr": 0.004912723848944788, + "acc_norm": 0.5605457080262896, + "acc_norm_stderr": 0.004953063404791439 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041694, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041694 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5057471264367817, + "acc_stderr": 0.01787878232612923, + "acc_norm": 0.5057471264367817, + "acc_norm_stderr": 0.01787878232612923 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4494949494949495, + "acc_stderr": 0.03544132491947969, + "acc_norm": 0.4494949494949495, + "acc_norm_stderr": 0.03544132491947969 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03156663099215416, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03156663099215416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767766, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767766 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969567, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.0355068398916558, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.0355068398916558 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206177, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39896373056994816, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.39896373056994816, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43302752293577984, + "acc_stderr": 0.021244146569074338, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.021244146569074338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.027684181883302898, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.027684181883302898 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223977, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626978, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.029886910547626978 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28552803129074317, + "acc_stderr": 0.01153575158666565, + "acc_norm": 0.28552803129074317, + "acc_norm_stderr": 0.01153575158666565 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.03402272044340705, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.03402272044340705 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.41064069519153584, + "mc2_stderr": 0.014727550409349975 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42384887839433294, + "acc_stderr": 0.01698981083462825, + "acc_norm": 0.538370720188902, + "acc_norm_stderr": 0.01713966022184555 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.9", + "model_sha": "7d87974397be753ca5759d09c0688cc126becb31", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-yi-ko-6b-v0.9.16/result_2023-12-12 06:49:39.json b/inswave/AISquare-Instruct-yi-ko-6b-v0.9.16/result_2023-12-12 06:49:39.json new file mode 100644 index 0000000000000000000000000000000000000000..000116332a65b1ed06f5664a61df558f2c2049d4 --- /dev/null +++ b/inswave/AISquare-Instruct-yi-ko-6b-v0.9.16/result_2023-12-12 06:49:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36945392491467577, + "acc_stderr": 0.014104578366491887, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256524 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4115714001194981, + "acc_stderr": 0.0049111251010646425, + "acc_norm": 0.5487950607448715, + "acc_norm_stderr": 0.004965963647210317 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.01772458938967779, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.01772458938967779 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511326, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511326 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234354, + "acc_norm": 
0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016337, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016337 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681848, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681848 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.03077265364207567, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.03077265364207567 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 
0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.02397386199899207, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.02397386199899207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.021004201260420078, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420078 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604674, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604674 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.019933627776857428, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.019933627776857428 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176852, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176852 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.012002091666902307, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.012002091666902307 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.038517163193983926, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.038517163193983926 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.4369384111906916, + "mc2_stderr": 0.01525557244220662 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5360094451003542, + "acc_stderr": 0.01714571536548667, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-yi-ko-6b-v0.9.16", + "model_sha": "cbec29938730e1d3ac36c931bd5b1ee275d3dae0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-yi-ko-6b-v0.9.26/result_2023-12-21 01:20:21.json b/inswave/AISquare-Instruct-yi-ko-6b-v0.9.26/result_2023-12-21 01:20:21.json new file mode 100644 index 0000000000000000000000000000000000000000..b210c4c83d06bae9b36456860b2275983a065835 --- /dev/null +++ b/inswave/AISquare-Instruct-yi-ko-6b-v0.9.26/result_2023-12-21 01:20:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038078, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.014471133392642468 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40798645688109936, + "acc_stderr": 0.004904561795919, + "acc_norm": 0.5443138816968731, + "acc_norm_stderr": 0.004970145708187995 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5849297573435505, + "acc_stderr": 0.01762013700365528, + "acc_norm": 0.5849297573435505, + "acc_norm_stderr": 0.01762013700365528 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + 
"acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933907, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933907 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942656, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 
0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425086, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425086 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.02680372058320617, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.02680372058320617 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6055045871559633, + "acc_stderr": 0.020954642108587492, + "acc_norm": 0.6055045871559633, + "acc_norm_stderr": 0.020954642108587492 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271765, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271765 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291517, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291517 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289804, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364545, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364545 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301847, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 0.012008129938540476, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540476 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.038517163193983926, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.038517163193983926 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32313341493268055, + "mc1_stderr": 0.016371836286454604, + "mc2": 0.4670658990793913, + "mc2_stderr": 0.01522338794267629 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5631641086186541, + "acc_stderr": 0.017052633559856065, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.0168194386429714 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-yi-ko-6b-v0.9.26", + "model_sha": "3e0b1aaecaf0b1ca18382f799245a65f79177a21", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jaekwanyda/Yi-Ko-6B_KO_Open-Platypus/result_2023-12-29 06:19:39.json b/jaekwanyda/Yi-Ko-6B_KO_Open-Platypus/result_2023-12-29 06:19:39.json new file mode 100644 index 0000000000000000000000000000000000000000..7f6c96fcbba07c0c855294bea38d3342be670e92 --- /dev/null +++ b/jaekwanyda/Yi-Ko-6B_KO_Open-Platypus/result_2023-12-29 06:19:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3395904436860068, + "acc_stderr": 0.013839039762820167, + "acc_norm": 0.40017064846416384, + "acc_norm_stderr": 0.014317197787809174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39543915554670384, + "acc_stderr": 0.004879455474663811, + "acc_norm": 0.530870344552878, + "acc_norm_stderr": 0.0049802620254724775 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5402298850574713, + "acc_stderr": 0.017821994096933535, + "acc_norm": 
0.5402298850574713, + "acc_norm_stderr": 0.017821994096933535 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.02834504586484062, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.02834504586484062 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182087, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182087 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461227, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.033442837442804574, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.033442837442804574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556552, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556552 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.034457899643627485, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.034457899643627485 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.024026846392873502, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.024026846392873502 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539284, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539284 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5724770642201835, + "acc_stderr": 0.021210910204300434, + "acc_norm": 0.5724770642201835, + "acc_norm_stderr": 0.021210910204300434 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528784, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528784 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061173, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061173 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467761, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467761 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210746, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210746 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.029923100563683906, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.029923100563683906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3122555410691004, + "acc_stderr": 0.01183579813568318, + "acc_norm": 0.3122555410691004, + "acc_norm_stderr": 0.01183579813568318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606787, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606787 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522512, + "mc2": 0.4100825053117308, + "mc2_stderr": 0.014781636083926547 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.017188329219654287, + "acc_norm": 0.5962219598583235, + "acc_norm_stderr": 0.01686903154029863 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jaekwanyda/Yi-Ko-6B_KO_Open-Platypus", + "model_sha": "0e85d36838b09082b433d619c93744245219e9bf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jb723/3B_test_model/result_2023-11-21 05:17:41.json b/jb723/3B_test_model/result_2023-11-21 05:17:41.json new file mode 100644 index 0000000000000000000000000000000000000000..53e1c4df046e07347fb72e7f800ca39c58f41479 --- /dev/null +++ b/jb723/3B_test_model/result_2023-11-21 05:17:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19368600682593856, + "acc_stderr": 0.01154842540997854, + "acc_norm": 0.2627986348122867, + "acc_norm_stderr": 0.012862523175351333 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2524397530372436, + "acc_stderr": 0.0043352434344868275, + "acc_norm": 0.26030671181039633, + "acc_norm_stderr": 0.004379051357024134 + }, + "harness|ko_mmlu_world_religions|5": { + 
"acc": 0.2046783625730994, + "acc_stderr": 0.03094445977853321, + "acc_norm": 0.2046783625730994, + "acc_norm_stderr": 0.03094445977853321 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386687, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386687 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.21864951768488747, + "acc_stderr": 0.023475581417861106, + "acc_norm": 0.21864951768488747, + "acc_norm_stderr": 0.023475581417861106 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.032361983509282745, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.032361983509282745 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863807, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863807 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.258974358974359, + "acc_stderr": 0.02221110681006167, + "acc_norm": 0.258974358974359, + "acc_norm_stderr": 0.02221110681006167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22660098522167488, + "acc_stderr": 0.02945486383529295, 
+ "acc_norm": 0.22660098522167488, + "acc_norm_stderr": 0.02945486383529295 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.024580028921480996, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.024580028921480996 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20085470085470086, + "acc_stderr": 0.02624677294689048, + "acc_norm": 0.20085470085470086, + "acc_norm_stderr": 0.02624677294689048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.02674989977124124, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.02674989977124124 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.19090909090909092, + "acc_stderr": 0.03764425585984926, + "acc_norm": 0.19090909090909092, + "acc_norm_stderr": 0.03764425585984926 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.19205298013245034, + "acc_stderr": 0.032162984205936135, + "acc_norm": 0.19205298013245034, + "acc_norm_stderr": 0.032162984205936135 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.023703099525258172, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.023703099525258172 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24382716049382716, + "acc_stderr": 0.023891879541959614, + "acc_norm": 0.24382716049382716, + "acc_norm_stderr": 0.023891879541959614 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + 
"acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879341016, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.024170840879341016 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537537, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2245810055865922, + "acc_stderr": 0.01395680366654464, + "acc_norm": 0.2245810055865922, + "acc_norm_stderr": 0.01395680366654464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 
0.015152286907148125, + "mc2": 0.5049262297308551, + "mc2_stderr": 0.01678411384401745 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08736717827626919, + "acc_stderr": 0.009708162004168805, + "acc_norm": 0.2833530106257379, + "acc_norm_stderr": 0.015492852084597239 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jb723/3B_test_model", + "model_sha": "1a19c136fddd374cf68262eeb647d42f36626495", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jb723/LLaMA2-en-ko-7B-model/result_2023-10-26 04:11:34.json b/jb723/LLaMA2-en-ko-7B-model/result_2023-10-26 04:11:34.json new file mode 100644 index 0000000000000000000000000000000000000000..7fcc671852bd04a432237153b5ab61c8140b7b12 --- /dev/null +++ 
b/jb723/LLaMA2-en-ko-7B-model/result_2023-10-26 04:11:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20051194539249148, + "acc_stderr": 0.011700318050499354, + "acc_norm": 0.24573378839590443, + "acc_norm_stderr": 0.012581033453730099 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2593108942441745, + "acc_stderr": 0.004373608212561024, + "acc_norm": 0.2818163712407887, + "acc_norm_stderr": 0.004489648865080873 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2796934865900383, + "acc_stderr": 0.016050792148036536, + "acc_norm": 0.2796934865900383, + "acc_norm_stderr": 0.016050792148036536 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.034554737023254366, + "acc_norm": 0.2, + "acc_norm_stderr": 0.034554737023254366 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.030251237579213167, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.030251237579213167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370519, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370519 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140485, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.026160584450140485 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.03021683101150877, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.03021683101150877 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185554, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185554 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.029079374539480007, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.029079374539480007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.258974358974359, + "acc_stderr": 0.022211106810061672, + "acc_norm": 0.258974358974359, + "acc_norm_stderr": 0.022211106810061672 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 
0.046882617226215034 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617722, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617722 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029265, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029265 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3034188034188034, + "acc_stderr": 0.030118210106942662, + "acc_norm": 0.3034188034188034, + "acc_norm_stderr": 0.030118210106942662 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.02804918631569525, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.02804918631569525 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945266, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945266 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31840796019900497, + "acc_stderr": 0.032941184790540944, + "acc_norm": 0.31840796019900497, + "acc_norm_stderr": 0.032941184790540944 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641143, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641143 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388677003, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388677003 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724146, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724146 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.0242885336377261, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.0242885336377261 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.29015544041450775, + "acc_stderr": 0.03275264467791516, + "acc_norm": 0.29015544041450775, + "acc_norm_stderr": 0.03275264467791516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299102, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299102 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02564686309713791, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02564686309713791 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312337, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312337 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.017986615304030312, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.017986615304030312 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467764, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467764 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.028353212866863434, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.028353212866863434 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.026679252270103124, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.026679252270103124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073146, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658342, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658342 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.011005971399927227, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.011005971399927227 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967409, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967409 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23378212974296206, + "mc1_stderr": 0.014816195991931586, + "mc2": 0.4292237253037698, + "mc2_stderr": 0.016355958546968995 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.11452184179456906, + "acc_stderr": 0.010948330698808925, + "acc_norm": 0.1959858323494687, + "acc_norm_stderr": 0.013647685567768858 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jb723/LLaMA2-en-ko-7B-model", + "model_sha": 
"24e455bbf4039f360a37833583c335582d2c6030", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jb723/LLaMA2_crosslingual_transfer_1/result_2023-10-26 05:14:36.json b/jb723/LLaMA2_crosslingual_transfer_1/result_2023-10-26 05:14:36.json new file mode 100644 index 0000000000000000000000000000000000000000..b8859f59612c4c8e7b9bf1fc7899f6e6e6d6b596 --- /dev/null +++ b/jb723/LLaMA2_crosslingual_transfer_1/result_2023-10-26 05:14:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2226962457337884, + "acc_stderr": 0.012158314774829931, + "acc_norm": 0.2687713310580205, + "acc_norm_stderr": 0.012955065963710695 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2657837084246166, + "acc_stderr": 0.004408468107262734, + "acc_norm": 0.2920732921728739, + "acc_norm_stderr": 0.004537865171414028 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.0398913985953177, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.0398913985953177 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.280970625798212, + "acc_stderr": 0.016073127851221232, + "acc_norm": 0.280970625798212, + "acc_norm_stderr": 0.016073127851221232 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.0335567721631314, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.0335567721631314 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.02895734278834235, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.02895734278834235 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680588, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680588 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2315112540192926, + "acc_stderr": 0.023956532766639133, + "acc_norm": 0.2315112540192926, + "acc_norm_stderr": 0.023956532766639133 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.04118438565806298, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.04118438565806298 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, 
+ "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2689075630252101, + "acc_stderr": 0.028801392193631276, + "acc_norm": 0.2689075630252101, + "acc_norm_stderr": 0.028801392193631276 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 0.021193632525148533, + "acc_norm": 0.22564102564102564, + "acc_norm_stderr": 0.021193632525148533 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.02528441611490016, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.02528441611490016 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.37606837606837606, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.37606837606837606, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782855, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782855 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.1962962962962963, + "acc_stderr": 0.024217421327417162, + "acc_norm": 0.1962962962962963, + "acc_norm_stderr": 0.024217421327417162 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987054, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987054 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339191, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339191 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1907514450867052, + "acc_stderr": 0.029957851329869327, + "acc_norm": 0.1907514450867052, + "acc_norm_stderr": 0.029957851329869327 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.020742740560122666, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.020742740560122666 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388676985, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388676985 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.033519538795212696, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.033519538795212696 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294677, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845415, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845415 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22385321100917432, + "acc_stderr": 0.017871217767790215, + "acc_norm": 0.22385321100917432, + "acc_norm_stderr": 0.017871217767790215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.017776947157528044, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.017776947157528044 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290392, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290392 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.02746740180405799, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.02746740180405799 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20955882352941177, + "acc_stderr": 0.02472311040767704, + "acc_norm": 0.20955882352941177, + "acc_norm_stderr": 0.02472311040767704 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.02797982353874455, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.02797982353874455 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.03027497488021897, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.03027497488021897 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.20098039215686275, + "acc_stderr": 0.028125972265654362, + "acc_norm": 0.20098039215686275, + "acc_norm_stderr": 0.028125972265654362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603488, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603488 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.49334428566474076, + "mc2_stderr": 0.016873715132849066 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08146399055489964, + "acc_stderr": 0.009404717441946268, + "acc_norm": 0.32113341204250295, + "acc_norm_stderr": 0.016052762579111562 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 
1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jb723/LLaMA2_crosslingual_transfer_1", + "model_sha": "ece29b636ef0b0c4b6d945ed66e97510b3ad6b0a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jb723/cross_lingual_epoch2/result_2023-10-26 12:25:31.json b/jb723/cross_lingual_epoch2/result_2023-10-26 12:25:31.json new file mode 100644 index 0000000000000000000000000000000000000000..93698f8438aeb4967cde29429905b3fcce1cd766 --- /dev/null +++ b/jb723/cross_lingual_epoch2/result_2023-10-26 12:25:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2226962457337884, + "acc_stderr": 0.012158314774829928, + "acc_norm": 0.2841296928327645, + "acc_norm_stderr": 0.013179442447653887 + }, + "harness|ko_hellaswag|10": { + "acc": 0.26628161720772753, + "acc_stderr": 0.004411099046251013, + "acc_norm": 0.29107747460665206, + "acc_norm_stderr": 0.004533307758521328 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03615507630310935, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03615507630310935 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.039891398595317706, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.039891398595317706 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3205619412515964, + "acc_stderr": 0.016688893310803775, + "acc_norm": 0.3205619412515964, + "acc_norm_stderr": 0.016688893310803775 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501116, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501116 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.029513196625539355, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.029513196625539355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.034843315926805875, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.034843315926805875 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3504823151125402, + "acc_stderr": 0.027098652621301757, + "acc_norm": 0.3504823151125402, + "acc_norm_stderr": 0.027098652621301757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.0403931497872456, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.0403931497872456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03038835355188685, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03038835355188685 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2153846153846154, + "acc_stderr": 0.020843034557462878, + "acc_norm": 0.2153846153846154, + "acc_norm_stderr": 0.020843034557462878 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854932, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114475, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114475 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335134, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335134 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.43162393162393164, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.43162393162393164, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708076, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708076 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910509, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910509 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.36318407960199006, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.36318407960199006, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.315028901734104, + "acc_stderr": 0.025009313790069706, + "acc_norm": 0.315028901734104, + "acc_norm_stderr": 0.025009313790069706 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33024691358024694, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.33024691358024694, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.03027690994517826, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.03027690994517826 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28256880733944956, + "acc_stderr": 0.01930424349770715, + "acc_norm": 0.28256880733944956, + "acc_norm_stderr": 0.01930424349770715 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604673, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604673 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046637, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046637 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952924, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.01866335967146366, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.01866335967146366 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467764, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467764 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.026491914727355154, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.026491914727355154 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2681564245810056, + "acc_stderr": 0.014816119635317005, + "acc_norm": 0.2681564245810056, + "acc_norm_stderr": 0.014816119635317005 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1948529411764706, + "acc_stderr": 0.024060599423487424, + "acc_norm": 0.1948529411764706, + "acc_norm_stderr": 0.024060599423487424 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.030116426296540585, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.030116426296540585 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.027325470966716305, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.027325470966716305 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23255813953488372, + "mc1_stderr": 0.014789157531080522, + "mc2": 0.494893188252647, + "mc2_stderr": 0.016817822778795313 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09681227863046045, + "acc_stderr": 0.010166443512074711, + "acc_norm": 0.3612750885478158, + "acc_norm_stderr": 0.016515463022411997 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jb723/cross_lingual_epoch2", + "model_sha": "aa1654ae948febe0f7cf3e27d5f81a8df7a58118", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jb723/llama2-ko-7B-model/result_2023-09-27 11:00:22.json b/jb723/llama2-ko-7B-model/result_2023-09-27 11:00:22.json new file mode 100644 index 0000000000000000000000000000000000000000..4c70ff55d7f3259fbfa4284535dab5e62f5f0475 --- /dev/null +++ b/jb723/llama2-ko-7B-model/result_2023-09-27 11:00:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2226962457337884, + "acc_stderr": 0.012158314774829948, + "acc_norm": 0.2627986348122867, + "acc_norm_stderr": 0.012862523175351331 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2726548496315475, + "acc_stderr": 0.004444146875436292, + "acc_norm": 0.29635530770762797, + "acc_norm_stderr": 0.004557163175885563 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824561, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824561 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161549, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161549 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2848020434227331, + "acc_stderr": 0.016139174096522553, + "acc_norm": 0.2848020434227331, + "acc_norm_stderr": 0.016139174096522553 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066654, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066654 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231008, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231008 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648026, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648026 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969195, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969195 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.029857515673386417, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.029857515673386417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386215, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2689075630252101, + "acc_stderr": 0.028801392193631276, + "acc_norm": 0.2689075630252101, + "acc_norm_stderr": 0.028801392193631276 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.021992016662370568, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.021992016662370568 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114454, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114454 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.02528441611490016, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.02528441611490016 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3418803418803419, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.3418803418803419, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695248, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695248 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.044612721759105065, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.044612721759105065 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871927, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871927 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.38308457711442784, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.38308457711442784, + "acc_norm_stderr": 0.034375193373382504 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633345, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633345 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.315028901734104, + "acc_stderr": 0.025009313790069692, + "acc_norm": 0.315028901734104, + "acc_norm_stderr": 0.025009313790069692 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.02570264026060375, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.02570264026060375 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.03355397369686173, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.03355397369686173 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.01827257581023186, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.01827257581023186 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.03391160934343604, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.03391160934343604 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.017952449196987866, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.017952449196987866 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880585, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880585 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 
0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19907407407407407, + "acc_stderr": 0.027232298462690218, + "acc_norm": 0.19907407407407407, + "acc_norm_stderr": 0.027232298462690218 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02518778666022727, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02518778666022727 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20408163265306123, + "acc_stderr": 0.025801283475090496, + "acc_norm": 0.20408163265306123, + "acc_norm_stderr": 0.025801283475090496 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.029936696387138598, + "acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.029936696387138598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24837027379400262, + "acc_stderr": 0.011035212598034494, + "acc_norm": 0.24837027379400262, + "acc_norm_stderr": 0.011035212598034494 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591362, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557966, + "mc2": 0.43443146146429873, + "mc2_stderr": 0.01580310882533787 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.11452184179456906, + "acc_stderr": 0.010948330698808925, + "acc_norm": 0.1959858323494687, + "acc_norm_stderr": 0.013647685567768858 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jb723/llama2-ko-7B-model", + "model_sha": "24e455bbf4039f360a37833583c335582d2c6030", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jeonsworld/CarbonVillain-10.7B-v1/result_2023-12-31 06:47:29.json b/jeonsworld/CarbonVillain-10.7B-v1/result_2023-12-31 06:47:29.json new file mode 100644 index 0000000000000000000000000000000000000000..df38298049cc197c9e31a3c4bc4ac3e4bfeb7b3a --- /dev/null +++ b/jeonsworld/CarbonVillain-10.7B-v1/result_2023-12-31 06:47:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4564846416382253, + "acc_stderr": 0.01455594976049644, + "acc_norm": 0.4991467576791809, + "acc_norm_stderr": 0.014611369529813279 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44592710615415254, + "acc_stderr": 0.004960516570284905, + "acc_norm": 0.6064528978291177, + "acc_norm_stderr": 0.004875379352079818 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.04453254836326468, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.04453254836326468 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6819923371647509, + "acc_stderr": 0.016653486275615418, + "acc_norm": 0.6819923371647509, + "acc_norm_stderr": 0.016653486275615418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + 
"acc_stderr": 0.0326620429906468, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.0326620429906468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.57847533632287, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.57847533632287, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.043285772152629735, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.043285772152629735 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465918, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465918 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062947, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062947 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5743589743589743, + "acc_stderr": 0.025069094387296525, + "acc_norm": 0.5743589743589743, + "acc_norm_stderr": 0.025069094387296525 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.02762171783290703, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.02762171783290703 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.02645350805404033, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.02645350805404033 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.029560707392465715, + "acc_norm": 
0.37777777777777777, + "acc_norm_stderr": 0.029560707392465715 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.039580272311215706, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.039580272311215706 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4470899470899471, + "acc_stderr": 0.025606723995777025, + "acc_norm": 0.4470899470899471, + "acc_norm_stderr": 0.025606723995777025 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.02622615860512465, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.02622615860512465 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5987654320987654, + "acc_stderr": 0.027272582849839792, + "acc_norm": 0.5987654320987654, + "acc_norm_stderr": 0.027272582849839792 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7202072538860104, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.7202072538860104, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.047028804320496165, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.047028804320496165 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.689908256880734, + "acc_stderr": 0.019830849684439752, + "acc_norm": 0.689908256880734, + "acc_norm_stderr": 0.019830849684439752 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.028304576673141107, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.028304576673141107 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591206, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591206 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.020192808271433788, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.020192808271433788 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.43617021276595747, + "acc_stderr": 0.029583452036284062, + "acc_norm": 0.43617021276595747, + "acc_norm_stderr": 0.029583452036284062 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19553072625698323, + "acc_stderr": 0.013264579220945098, + "acc_norm": 0.19553072625698323, + "acc_norm_stderr": 0.013264579220945098 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5183823529411765, + "acc_stderr": 0.03035230339535196, + "acc_norm": 0.5183823529411765, + "acc_norm_stderr": 0.03035230339535196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 0.027479744550808517, + "acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.027479744550808517 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4152542372881356, + "acc_stderr": 0.012585471793400665, + "acc_norm": 0.4152542372881356, + "acc_norm_stderr": 0.012585471793400665 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.036462049632538136, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.036462049632538136 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3182374541003672, + "mc1_stderr": 0.016305988648920605, + "mc2": 0.4821580350888159, + "mc2_stderr": 0.01537715862983969 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5855962219598583, + "acc_stderr": 0.016936583383943608, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.016616612843224948 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jeonsworld/CarbonVillain-10.7B-v1", + "model_sha": "f016ba7ef7a51ce15b334176f25df87104af655f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jeonsworld/CarbonVillain-13B-v1/result_2023-12-28 20:02:14.json b/jeonsworld/CarbonVillain-13B-v1/result_2023-12-28 20:02:14.json new file mode 100644 index 0000000000000000000000000000000000000000..ffde1ee29ed84249c7a59afe42f44bfb0d216e45 --- /dev/null +++ b/jeonsworld/CarbonVillain-13B-v1/result_2023-12-28 20:02:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4232081911262799, + "acc_stderr": 0.01443803622084803, + "acc_norm": 0.48378839590443684, + "acc_norm_stderr": 0.014603708567414933 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45200159330810596, + "acc_stderr": 0.00496673681101049, + "acc_norm": 0.6045608444532962, + "acc_norm_stderr": 0.004879455474663812 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + 
"acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.042763494943765974, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.042763494943765974 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840622, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126174, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126174 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.02829205683011273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.44150943396226416, + "acc_stderr": 0.03056159042673183, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655816, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655816 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348923, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348923 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.02113637650403087, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.02113637650403087 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 
0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.02804594694204239, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.02804594694204239 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527836, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527836 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254163, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254163 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610805, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610805 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36571056062581486, + "acc_stderr": 0.01230102818884057, + "acc_norm": 0.36571056062581486, + "acc_norm_stderr": 0.01230102818884057 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33659730722154224, + "mc1_stderr": 0.016542412809494873, + "mc2": 0.5085732867841173, + "mc2_stderr": 0.015520250860491847 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.017186028469489287, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jeonsworld/CarbonVillain-13B-v1", + "model_sha": "3ddeca5a6993bdb8f4a456f7e0db598b0841d87e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jhflow/komt-mistral7b-kor-orca-lora/result_2023-10-28 07:47:54.json b/jhflow/komt-mistral7b-kor-orca-lora/result_2023-10-28 07:47:54.json new file mode 100644 index 0000000000000000000000000000000000000000..68cf1f07617c63fd062e8eb9b0c158269cd95bc3 --- /dev/null +++ b/jhflow/komt-mistral7b-kor-orca-lora/result_2023-10-28 07:47:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3302047781569966, + "acc_stderr": 0.013743085603760424, + "acc_norm": 0.3720136518771331, + "acc_norm_stderr": 0.014124597881844461 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3610834495120494, + "acc_stderr": 0.004793330525656211, + "acc_norm": 0.4630551682931687, + "acc_norm_stderr": 0.004976141457736879 + }, + "harness|ko_mmlu_world_religions|5": 
{ + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259263996, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.037439798259263996 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.01783252407959326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621502, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621502 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357766, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357766 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38974358974358975, + "acc_stderr": 0.024726967886647078, + "acc_norm": 0.38974358974358975, + "acc_norm_stderr": 0.024726967886647078 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.034524539038220385, + 
"acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.034524539038220385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4129032258064516, + "acc_stderr": 0.028009138125400387, + "acc_norm": 0.4129032258064516, + "acc_norm_stderr": 0.028009138125400387 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.03158539157745636, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.03158539157745636 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.03874102859818083, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.03874102859818083 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.02716368603827124, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.02716368603827124 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46238532110091746, + "acc_stderr": 0.021376575274397576, + "acc_norm": 
0.46238532110091746, + "acc_norm_stderr": 0.021376575274397576 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852387, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852387 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.01941253924203216, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.01941253924203216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.015024083883322895, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.015024083883322895 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3285528031290743, + "acc_stderr": 0.011996027247502927, + "acc_norm": 0.3285528031290743, + "acc_norm_stderr": 0.011996027247502927 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834562, + "mc2": 0.4512199737148749, + 
"mc2_stderr": 0.015325712009535085 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4474616292798111, + "acc_stderr": 0.01709519030150058, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.017184015060401455 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jhflow/komt-mistral7b-kor-orca-lora", + "model_sha": "16c036d4e96674aa4210dfce64482dbc155b6b44", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jhflow/mistral7b-lora-multi-turn-v2/result_2023-11-02 00:49:20.json b/jhflow/mistral7b-lora-multi-turn-v2/result_2023-11-02 00:49:20.json new file mode 100644 index 0000000000000000000000000000000000000000..114451e285d0cfe9b6b37ec41c365894325c329c --- /dev/null +++ 
b/jhflow/mistral7b-lora-multi-turn-v2/result_2023-11-02 00:49:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635474, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892889 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36715793666600277, + "acc_stderr": 0.004810449343572393, + "acc_norm": 0.4765982871937861, + "acc_norm_stderr": 0.004984313205791441 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4763729246487867, + "acc_stderr": 0.01785998976517645, + "acc_norm": 0.4763729246487867, + "acc_norm_stderr": 0.01785998976517645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + 
"acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683512, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683512 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + 
"acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089768, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089768 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490435, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.01980828131744985, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.01980828131744985 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20782122905027933, + "acc_stderr": 0.013570248325081347, + "acc_norm": 0.20782122905027933, + "acc_norm_stderr": 0.013570248325081347 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125468, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125468 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.0321481463024037, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.0321481463024037 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228575, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228575 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 
0.3872549019607843, + "acc_stderr": 0.03418931233833343, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833343 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454892, + "mc2": 0.45851375159014823, + "mc2_stderr": 0.015537179333977727 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.017177301992342558, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jhflow/mistral7b-lora-multi-turn-v2", + "model_sha": "a425082361b06134ccebef1b5f841c2edd27f644", + "model_dtype": 
"torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jhflow/mistral7b-lora-multi-turn-v3/result_2023-11-06 01:57:51.json b/jhflow/mistral7b-lora-multi-turn-v3/result_2023-11-06 01:57:51.json new file mode 100644 index 0000000000000000000000000000000000000000..a50c934dcafbe1315abc46eae3533c1278e432c3 --- /dev/null +++ b/jhflow/mistral7b-lora-multi-turn-v3/result_2023-11-06 01:57:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497717, + "acc_norm": 0.39419795221843, + "acc_norm_stderr": 0.014280522667467316 + }, + "harness|ko_hellaswag|10": { + "acc": 0.371539533957379, + "acc_stderr": 0.0048222865563052175, + "acc_norm": 0.48088030272854015, + "acc_norm_stderr": 0.004986131919673968 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.017852981266633955, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.017852981266633955 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.028013651891995072, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.028013651891995072 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + 
"acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017838, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017838 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431194, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230186, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230186 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.033742355504256936, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.033742355504256936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129274, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129274 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.02683080599895223, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.02683080599895223 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 
0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.021414757058175502, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.021414757058175502 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094597, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094597 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536044, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409151, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409151 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 
0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3546284224250326, + "acc_stderr": 0.012218576439090167, + "acc_norm": 0.3546284224250326, + "acc_norm_stderr": 0.012218576439090167 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398396, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398396 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627887, + "mc2": 0.4728929290392366, + "mc2_stderr": 0.01563566589182946 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4155844155844156, + "acc_stderr": 0.016943586313076568, + "acc_norm": 0.46989374262101535, + "acc_norm_stderr": 0.017159163590170216 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jhflow/mistral7b-lora-multi-turn-v3", + "model_sha": "6ff6149ce4b66cbd5acb5e9683c44c50aae2ccd7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jhflow/mistral7b-lora-multiturn-v4/result_2023-12-03 11:37:39.json b/jhflow/mistral7b-lora-multiturn-v4/result_2023-12-03 11:37:39.json new file mode 100644 index 0000000000000000000000000000000000000000..1dd83057be7931a405c872560d789fa51379a850 --- /dev/null +++ b/jhflow/mistral7b-lora-multiturn-v4/result_2023-12-03 11:37:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.014383915302225395, + "acc_norm": 0.4761092150170648, + "acc_norm_stderr": 0.014594701798071654 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4041027683728341, + "acc_stderr": 0.004897146690596254, + "acc_norm": 0.5364469229237204, + "acc_norm_stderr": 0.004976507121076267 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.037792759455032014, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.037792759455032014 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.578544061302682, + "acc_stderr": 0.017657976412654854, + "acc_norm": 0.578544061302682, + "acc_norm_stderr": 0.017657976412654854 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232963, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 
0.03481285338232963 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714506, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714506 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5051282051282051, + "acc_stderr": 0.025349672906838636, + "acc_norm": 0.5051282051282051, + "acc_norm_stderr": 0.025349672906838636 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004257, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004257 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683512, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683512 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119994, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119994 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.03400598505599015, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.03400598505599015 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670788, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670788 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272436, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5284403669724771, + "acc_stderr": 0.021402615697348044, + "acc_norm": 0.5284403669724771, + "acc_norm_stderr": 0.021402615697348044 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.02858034106513829, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.02858034106513829 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19664804469273742, + "acc_stderr": 0.01329318302745465, + "acc_norm": 0.19664804469273742, + "acc_norm_stderr": 0.01329318302745465 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + 
"acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610812, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610812 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585892, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585892 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32558139534883723, + "mc1_stderr": 0.01640398946990781, + "mc2": 0.4915099229946796, + "mc2_stderr": 0.015373142456080352 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727634, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.017184015060401455 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jhflow/mistral7b-lora-multiturn-v4", + "model_sha": "6e9923c239780e00a982e5a212ab70aa5b19c071", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jhflow/yi-ko-6b-dpo-further/result_2023-12-20 08:31:30.json b/jhflow/yi-ko-6b-dpo-further/result_2023-12-20 08:31:30.json new file mode 100644 index 0000000000000000000000000000000000000000..0a9bdfbcf3ef40007b46fd5f363e3bc7490611ee --- /dev/null +++ b/jhflow/yi-ko-6b-dpo-further/result_2023-12-20 08:31:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34982935153583616, + "acc_stderr": 0.013936809212158287, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303026 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4018123879705238, + "acc_stderr": 0.004892624490937208, + "acc_norm": 0.5337582154949213, + "acc_norm_stderr": 0.004978395540514387 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5696040868454662, + "acc_stderr": 0.01770586877629239, + "acc_norm": 0.5696040868454662, + "acc_norm_stderr": 0.01770586877629239 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + 
"acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.034273086529999365, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.034273086529999365 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.025317649726448652, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.025317649726448652 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942645, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + 
"acc_stderr": 0.024278568024307706, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307706 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958215, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958215 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.020920058346111065, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.020920058346111065 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562605, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.01982184368827177, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.01982184368827177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915185, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915185 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + 
"acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409163, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409163 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016643, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016643 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935893, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935893 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.011983819806464752, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464752 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4470960631487972, + "mc2_stderr": 0.014964323216719578 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5301062573789846, + "acc_stderr": 0.01715916359017022, + "acc_norm": 0.6080283353010626, + "acc_norm_stderr": 0.016784332119424084 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jhflow/yi-ko-6b-dpo-further", + "model_sha": "152a5039537a1898c6a352619dfb0740176c3965", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jhflow/yi-ko-6b-lora-v1/result_2023-12-05 01:06:53.json b/jhflow/yi-ko-6b-lora-v1/result_2023-12-05 01:06:53.json new file mode 100644 index 0000000000000000000000000000000000000000..5081caf4bd59816cc95fdfbc4b203c5158bd7dfc --- /dev/null +++ b/jhflow/yi-ko-6b-lora-v1/result_2023-12-05 01:06:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179349, + "acc_norm": 0.4138225255972696, + "acc_norm_stderr": 0.014392730009221009 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4023102967536347, + "acc_stderr": 0.004893617014975309, + "acc_norm": 0.5377414857598088, + "acc_norm_stderr": 0.0049755460189506735 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.017747874245683606, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.017747874245683606 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511326, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511326 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 
0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016337, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016337 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933903, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933903 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5193548387096775, + "acc_stderr": 0.028422687404312117, + "acc_norm": 0.5193548387096775, + "acc_norm_stderr": 0.028422687404312117 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.03011821010694265, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.03011821010694265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + 
"acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518026, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518026 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376896, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376896 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.5944954128440367, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604674, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604674 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562605, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786682, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786682 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 
0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.044642857142857116, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.044642857142857116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125478, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125478 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.031001209039894836, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.031001209039894836 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3194263363754889, + "acc_stderr": 0.01190835717675616, + "acc_norm": 0.3194263363754889, + "acc_norm_stderr": 0.01190835717675616 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.038783721137112745, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.038783721137112745 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608742, + "mc2": 0.4320582855204373, + "mc2_stderr": 0.014839195488728087 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654273, + "acc_norm": 0.564344746162928, + "acc_norm_stderr": 0.017047415229476316 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jhflow/yi-ko-6b-lora-v1", + "model_sha": "d987b8419e44ab180e843b39fb75d24b2530ffd7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jin05102518/Astral-7B-0.5Epoch-Test/result_2023-11-03 02:11:10.json b/jin05102518/Astral-7B-0.5Epoch-Test/result_2023-11-03 02:11:10.json new file mode 100644 index 0000000000000000000000000000000000000000..815d14e7d4f2470144c0e99800a505add14dd174 --- /dev/null +++ b/jin05102518/Astral-7B-0.5Epoch-Test/result_2023-11-03 02:11:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21416382252559726, + "acc_stderr": 0.011988383205966482, + "acc_norm": 0.25170648464163825, + "acc_norm_stderr": 0.01268249633404296 + }, + "harness|ko_hellaswag|10": { + "acc": 0.273451503684525, + "acc_stderr": 0.004448196648383001, + "acc_norm": 0.29635530770762797, + "acc_norm_stderr": 0.004557163175885562 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.036310534964889056, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.036310534964889056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2720306513409962, + "acc_stderr": 0.01591336744750051, + "acc_norm": 0.2720306513409962, + "acc_norm_stderr": 0.01591336744750051 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 
0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.02937917046412481, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.02937917046412481 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.23493975903614459, + "acc_stderr": 0.03300533186128922, + "acc_norm": 0.23493975903614459, + "acc_norm_stderr": 0.03300533186128922 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2347266881028939, + "acc_stderr": 0.024071805887677045, + "acc_norm": 0.2347266881028939, + "acc_norm_stderr": 0.024071805887677045 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.03089861088247751, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.03089861088247751 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.028869778460267042, + "acc_norm": 0.20707070707070707, + "acc_norm_stderr": 0.028869778460267042 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3277310924369748, + "acc_stderr": 0.03048991141767323, + "acc_norm": 0.3277310924369748, + "acc_norm_stderr": 0.03048991141767323 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2794871794871795, + "acc_stderr": 0.022752388839776823, + "acc_norm": 0.2794871794871795, + "acc_norm_stderr": 0.022752388839776823 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591311, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591311 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114482, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114482 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36752136752136755, + "acc_stderr": 0.03158539157745635, + "acc_norm": 0.36752136752136755, + "acc_norm_stderr": 0.03158539157745635 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708083, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708083 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 
0.04309118709946459, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946459 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31343283582089554, + "acc_stderr": 0.032801882053486414, + "acc_norm": 0.31343283582089554, + "acc_norm_stderr": 0.032801882053486414 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02306818884826111, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02306818884826111 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30346820809248554, + "acc_stderr": 0.02475241196091722, + "acc_norm": 0.30346820809248554, + "acc_norm_stderr": 0.02475241196091722 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2808641975308642, + "acc_stderr": 0.025006469755799208, + "acc_norm": 0.2808641975308642, + "acc_norm_stderr": 0.025006469755799208 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.03646758875075566, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.03646758875075566 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25137614678899084, + "acc_stderr": 0.01859920636028741, + "acc_norm": 0.25137614678899084, + "acc_norm_stderr": 0.01859920636028741 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488795, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488795 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.02736359328468494, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.02736359328468494 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.043207678075366705, + "acc_norm": 
0.33884297520661155, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810538, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810538 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898435, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898435 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.02737294220178816, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.02737294220178816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.03058732629470236, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.03058732629470236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2666232073011734, + "acc_stderr": 0.011293836031612142, + "acc_norm": 0.2666232073011734, + "acc_norm_stderr": 0.011293836031612142 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.03256685484460389, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.03256685484460389 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.034277431758165236, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.034277431758165236 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752322, + "mc2": 0.45037975897741206, + "mc2_stderr": 0.015986987451663295 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.179456906729634, + "acc_stderr": 0.013193062031400433, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.015163499477892412 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jin05102518/Astral-7B-0.5Epoch-Test", + "model_sha": "cf77310443930dfb98bc55603555822c98af0309", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.05/result_2023-10-22 13:07:21.json b/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.05/result_2023-10-22 13:07:21.json new file mode 100644 index 0000000000000000000000000000000000000000..b8d8f7c59fa8353cdf052a487a3d8a6add7f64c9 --- /dev/null +++ b/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.05/result_2023-10-22 13:07:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3148464163822526, + "acc_stderr": 0.01357265770308495, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 0.014150631435111726 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3571997610037841, + "acc_stderr": 0.004781950883460504, + "acc_norm": 0.4569806811392153, + "acc_norm_stderr": 0.004971278309204196 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4342273307790549, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.4342273307790549, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610334, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610334 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484068, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.02834504586484068 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755292, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755292 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539746, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539746 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, 
+ "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009805, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009805 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.037038511930995215, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.037038511930995215 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.02475747390275205, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.02475747390275205 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 
0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142624, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142624 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.01943177567703731, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.01943177567703731 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042405, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33631284916201115, + "acc_stderr": 0.015801003729145908, + "acc_norm": 0.33631284916201115, + "acc_norm_stderr": 0.015801003729145908 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163906, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.011901895635786084, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.011901895635786084 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386854, + "mc2": 0.4745826617149022, + "mc2_stderr": 0.015464604846827046 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38016528925619836, + "acc_stderr": 
0.0166893335969801, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.01694358631307657 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jin05102518/Astral-7B-1.0Epoch-Instruct-v0.05", + "model_sha": "fb04a8d5574256eefe4faa1783874384c88eea9b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.06/result_2023-11-02 15:48:21.json b/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.06/result_2023-11-02 15:48:21.json new file mode 100644 index 0000000000000000000000000000000000000000..593dc97fe3e01ce6a5d0a9844e3bcbe5dbe7fdce --- /dev/null +++ b/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.06/result_2023-11-02 15:48:21.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.3720136518771331, + "acc_stderr": 0.014124597881844463, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.014430197069326023 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38398725353515234, + "acc_stderr": 0.004853608805843877, + "acc_norm": 0.5012945628360884, + "acc_norm_stderr": 0.0049897646867388306 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.51213282247765, + "acc_stderr": 0.017874698667491338, + "acc_norm": 0.51213282247765, + "acc_norm_stderr": 0.017874698667491338 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685516, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685516 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.0324498084999003, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.0324498084999003 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4128205128205128, + "acc_stderr": 0.02496268356433182, + "acc_norm": 0.4128205128205128, + "acc_norm_stderr": 0.02496268356433182 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + 
"acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674064, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674064 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.03070948699255655, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.03070948699255655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465076, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465076 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.02437319786798305, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.02437319786798305 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833942, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833942 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 
0.03590910952235524, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46605504587155966, + "acc_stderr": 0.021387863350354, + "acc_norm": 0.46605504587155966, + "acc_norm_stderr": 0.021387863350354 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.019706875804085637, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.019706875804085637 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2122905027932961, + "acc_stderr": 0.013676644685831728, + "acc_norm": 0.2122905027932961, + "acc_norm_stderr": 0.013676644685831728 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411952, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411952 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.03106721126287249, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.03106721126287249 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.031843998738112236, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.031843998738112236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.035077938347913236, + 
"acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502332, + "mc2": 0.4460519958175022, + "mc2_stderr": 0.015200803602621195 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.34946871310507677, + "acc_stderr": 0.016392797085769843, + "acc_norm": 0.3907910271546635, + "acc_norm_stderr": 0.01677529846510825 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jin05102518/Astral-7B-1.0Epoch-Instruct-v0.06", + "model_sha": "ff04d583e74c05644558288bcbbec86f701fd5d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 
0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jin05102518/Astral-7B-Instruct-v0.01/result_2023-10-13 16:06:56.json b/jin05102518/Astral-7B-Instruct-v0.01/result_2023-10-13 16:06:56.json new file mode 100644 index 0000000000000000000000000000000000000000..9aafe20bd47563f3be8595795e165f52e3db5055 --- /dev/null +++ b/jin05102518/Astral-7B-Instruct-v0.01/result_2023-10-13 16:06:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3199658703071672, + "acc_stderr": 0.013631345807016196, + "acc_norm": 0.38310580204778155, + "acc_norm_stderr": 0.014206472661672876 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36875124477195775, + "acc_stderr": 0.004814803098436803, + "acc_norm": 0.4794861581358295, + "acc_norm_stderr": 0.0049855800659464565 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.454661558109834, + "acc_stderr": 0.017806304585052602, + "acc_norm": 0.454661558109834, + "acc_norm_stderr": 0.017806304585052602 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.039992628766177214, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.039992628766177214 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.032737667254591575, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.032737667254591575 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0438986995680878, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0438986995680878 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + 
"acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.02453759157283053, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.02453759157283053 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5769230769230769, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.5769230769230769, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871923, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871923 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851102, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851102 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.430635838150289, + "acc_stderr": 0.02665880027367238, + "acc_norm": 0.430635838150289, + "acc_norm_stderr": 0.02665880027367238 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 
0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39378238341968913, + "acc_stderr": 0.03526077095548237, + "acc_norm": 0.39378238341968913, + "acc_norm_stderr": 0.03526077095548237 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42385321100917434, + "acc_stderr": 0.02118726320908752, + "acc_norm": 0.42385321100917434, + "acc_norm_stderr": 0.02118726320908752 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.045077322787750874, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.045077322787750874 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.018999707383162662, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.018999707383162662 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611313, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611313 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29465449804432853, + "acc_stderr": 0.011643576764069548, + "acc_norm": 0.29465449804432853, + "acc_norm_stderr": 0.011643576764069548 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.0372820699868265, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.0372820699868265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476199, + "mc2": 0.4322824441345256, + "mc2_stderr": 0.015763267859642997 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3730814639905549, + "acc_stderr": 0.016627318275137432, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.01688474950319139 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 
1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jin05102518/Astral-7B-Instruct-v0.01", + "model_sha": "095682dac7dc303e13f3c4135333e5c78db5afbf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/Yi-ko-1.1-dedup/result_2023-12-26 01:45:47.json b/jingyeom/Yi-ko-1.1-dedup/result_2023-12-26 01:45:47.json new file mode 100644 index 0000000000000000000000000000000000000000..d9aa7ea8a8c317c2d7780116dff7ab04c13e2c4e --- /dev/null +++ b/jingyeom/Yi-ko-1.1-dedup/result_2023-12-26 01:45:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3779863481228669, + "acc_stderr": 0.014169664520303096, + "acc_norm": 0.4445392491467577, + "acc_norm_stderr": 0.014521226405627079 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40689105755825533, + "acc_stderr": 0.004902502514738602, + "acc_norm": 0.5487950607448715, + "acc_norm_stderr": 0.0049659636472103134 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5696040868454662, + "acc_stderr": 0.017705868776292398, + "acc_norm": 0.5696040868454662, + "acc_norm_stderr": 0.017705868776292398 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { 
+ "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993178, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993178 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.0324498084999003, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.0324498084999003 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.025317649726448652, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.025317649726448652 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.0483036602463533, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.0483036602463533 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.02844341422643831, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.02844341422643831 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.02375292871211214, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.02375292871211214 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + 
"acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.5944954128440367, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805434, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.0142426300705749, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.0142426300705749 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 
0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933105, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048224, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.4181648648839536, + "mc2_stderr": 0.014998383674803386 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.017187658199336736, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/Yi-ko-1.1-dedup", + "model_sha": "9303f60af4c70b680d7391ab54b0b6d267862954", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/Yi-ko-1.1/result_2023-12-26 01:57:05.json b/jingyeom/Yi-ko-1.1/result_2023-12-26 01:57:05.json new file mode 100644 index 0000000000000000000000000000000000000000..152f4a3ad9f7d4dcfe1fac144d965ecbfac9b5a9 --- /dev/null +++ b/jingyeom/Yi-ko-1.1/result_2023-12-26 01:57:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.014111298751674948, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804234 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4159529974108743, + "acc_stderr": 0.004918781662373929, + "acc_norm": 0.5526787492531369, + "acc_norm_stderr": 0.0049620103382263464 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394216, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5259259259259259, + "acc_stderr": 0.04313531696750575, + "acc_norm": 0.5259259259259259, + "acc_norm_stderr": 0.04313531696750575 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + 
"acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.02529460802398648, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.02529460802398648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342596, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048487, + "acc_norm": 
0.291005291005291, + "acc_norm_stderr": 0.02339382650048487 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336936, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5908256880733945, + "acc_stderr": 0.02108067026443373, + "acc_norm": 0.5908256880733945, + "acc_norm_stderr": 0.02108067026443373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805413, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.043642261558410445, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.043642261558410445 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": 
{ + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010066, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010066 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.01204381265584614, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.01204381265584614 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006514, + "mc2": 0.42276488773202564, + "mc2_stderr": 0.015556879375012815 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4639905548996458, + "acc_stderr": 0.01714571536548666, + "acc_norm": 0.4769775678866588, + "acc_norm_stderr": 0.017172121546727637 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/Yi-ko-1.1", + "model_sha": "81f5744d1fbdc6467e8df25ceff6f2f0cfaa06e1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/Yi-ko-1.2/result_2023-12-28 05:50:10.json b/jingyeom/Yi-ko-1.2/result_2023-12-28 05:50:10.json new file mode 100644 index 0000000000000000000000000000000000000000..49f78778d7441a9b42da06e9e5cf15d9fa39935c --- /dev/null +++ b/jingyeom/Yi-ko-1.2/result_2023-12-28 05:50:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407163, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580122 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41884086835291773, + "acc_stderr": 0.00492360920786154, + "acc_norm": 0.5478988249352719, + "acc_norm_stderr": 0.004966832553245044 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.0381107966983353, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.0381107966983353 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865626, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865626 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 
0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232964, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232964 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.02517404838400078, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.02517404838400078 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + 
"acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159664, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762616, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762616 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.581651376146789, + "acc_stderr": 0.02114954859644388, + "acc_norm": 0.581651376146789, + "acc_norm_stderr": 0.02114954859644388 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805413, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.020071257886886528, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.020071257886886528 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.044642857142857116, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.044642857142857116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.01428834380392529, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.01428834380392529 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.029674288281311172, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311172 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.03186785930004129, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.03186785930004129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3324641460234681, + "acc_stderr": 0.012032022332260518, + "acc_norm": 0.3324641460234681, + "acc_norm_stderr": 0.012032022332260518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630573, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165635, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165635 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015008, + "mc2": 0.40740897953830707, + "mc2_stderr": 0.015409125138870454 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4899645808736718, + "acc_stderr": 0.017186891286894043, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.017186891286894063 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/Yi-ko-1.2", + "model_sha": "7b0cdd9271915ce57a6f6da93e1830f7210b8a24", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/Yi-ko_3_1_7/result_2023-12-21 09:24:47.json b/jingyeom/Yi-ko_3_1_7/result_2023-12-21 09:24:47.json new file mode 100644 index 0000000000000000000000000000000000000000..e6082a7691a0f9706c9cffa956a722c4c6bf8a18 --- /dev/null +++ b/jingyeom/Yi-ko_3_1_7/result_2023-12-21 09:24:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142815, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650652 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4147580163314081, + "acc_stderr": 0.004916733258140296, + "acc_norm": 0.5525791674965146, + "acc_norm_stderr": 0.004962115526014293 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370608, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370608 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.03488901616852729, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.03488901616852729 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734026, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734026 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 
0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.02467786284133278, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.02467786284133278 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548914, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548914 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.0358701498607566 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.020920058346111065, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.020920058346111065 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + 
"acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750187, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750187 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.029674288281311172, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311172 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.031601069934496046, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.031601069934496046 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214941, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214941 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024626, + "mc2": 0.43705454925575216, + "mc2_stderr": 0.015879980235191146 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4604486422668241, + "acc_stderr": 0.017136487626049846, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.017161563949916348 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 
1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/Yi-ko_3_1_7", + "model_sha": "e05783de0d8576cfcf5c14e8e66ae1349c03e4ad", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/mistral_ko_all_inst/result_2023-12-18 11:26:46.json b/jingyeom/mistral_ko_all_inst/result_2023-12-18 11:26:46.json new file mode 100644 index 0000000000000000000000000000000000000000..89eb1126575b1d07e19ca158b267faef140a8513 --- /dev/null +++ b/jingyeom/mistral_ko_all_inst/result_2023-12-18 11:26:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32764505119453924, + "acc_stderr": 0.01371584794071934, + "acc_norm": 0.36860068259385664, + "acc_norm_stderr": 0.0140978106780422 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36715793666600277, + "acc_stderr": 0.004810449343572393, + "acc_norm": 0.47988448516231824, + "acc_norm_stderr": 0.004985741706385723 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.01785298126663395, + 
"acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.01785298126663395 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.025230381238934833, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.025230381238934833 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813322, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813322 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.037038511930995194, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.037038511930995194 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.02467786284133278, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.02467786284133278 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668784, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.021421402982548878, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.021421402982548878 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138282, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138282 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.01967580813528151, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.01967580813528151 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364548, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364548 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.02850145286039657, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.02850145286039657 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 0.012008129938540476, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540476 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.0345423658538061, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.0345423658538061 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.4665680569692903, + "mc2_stderr": 0.01556157417629164 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42739079102715466, + "acc_stderr": 0.017008129844823156, + "acc_norm": 0.4781582054309327, + "acc_norm_stderr": 0.01717394447429438 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/mistral_ko_all_inst", + "model_sha": "5782059fd899a370fc0e92a7d18501fd75f0b725", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/penguin3.1.6n_7b/result_2023-11-16 00:30:20.json b/jingyeom/penguin3.1.6n_7b/result_2023-11-16 00:30:20.json new file mode 100644 index 0000000000000000000000000000000000000000..c579500152b6d662c9a5649e90173dae4a7474d5 --- /dev/null +++ b/jingyeom/penguin3.1.6n_7b/result_2023-11-16 00:30:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3378839590443686, + "acc_stderr": 0.013822047922283509, + "acc_norm": 0.386518771331058, + "acc_norm_stderr": 0.01423008476191048 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3723361880103565, + "acc_stderr": 0.004824393076826635, + "acc_norm": 0.47211710814578767, + "acc_norm_stderr": 0.0049820167024459605 + 
}, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4393358876117497, + "acc_stderr": 0.01774787424568361, + "acc_norm": 0.4393358876117497, + "acc_norm_stderr": 0.01774787424568361 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.02823776942208534, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.02823776942208534 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052452, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052452 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985905, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985905 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37358490566037733, + "acc_stderr": 0.02977308271331987, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.02977308271331987 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425082, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425082 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.037311335196738925, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.037311335196738925 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607708, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + "acc_stderr": 0.03508984236295341, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295341 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4, + "acc_stderr": 0.021004201260420075, + 
"acc_norm": 0.4, + "acc_norm_stderr": 0.021004201260420075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290324, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.027870745278290324 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.01918463932809249, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.01918463932809249 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29720670391061454, + "acc_stderr": 0.015285313353641597, + "acc_norm": 0.29720670391061454, + "acc_norm_stderr": 0.015285313353641597 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2977941176470588, + "acc_stderr": 0.02777829870154544, + "acc_norm": 0.2977941176470588, + "acc_norm_stderr": 0.02777829870154544 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.03078905113903081, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.03078905113903081 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.011759939618085457, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085457 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.038154943086889305, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.038154943086889305 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237269, + 
"mc2": 0.4285943154065853, + "mc2_stderr": 0.01581116786098837 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2585596221959858, + "acc_stderr": 0.015053354438963981, + "acc_norm": 0.2939787485242031, + "acc_norm_stderr": 0.01566324256909113 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/penguin3.1.6n_7b", + "model_sha": "8e94b41ee3cf66c8448deb70f194e2c5c3c066c2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/seal3.1.3_ia3/result_2023-11-15 00:26:15.json b/jingyeom/seal3.1.3_ia3/result_2023-11-15 00:26:15.json new file mode 100644 index 0000000000000000000000000000000000000000..f63e44d3d6678620079655a6e9467f98146a5fcb --- /dev/null +++ 
b/jingyeom/seal3.1.3_ia3/result_2023-11-15 00:26:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38054607508532423, + "acc_stderr": 0.014188277712349814, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633832 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41784505078669587, + "acc_stderr": 0.0049219641338740195, + "acc_norm": 0.5613423620792671, + "acc_norm_stderr": 0.0049520870831289065 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49169859514687103, + "acc_stderr": 0.017877498991072008, + "acc_norm": 0.49169859514687103, + "acc_norm_stderr": 0.017877498991072008 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085328, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085328 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419034, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419034 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376536, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376536 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40336134453781514, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.40336134453781514, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.024756000382130945, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.024756000382130945 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + 
"acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.0343046241610387, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.0343046241610387 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3903225806451613, + "acc_stderr": 0.027751256636969573, + "acc_norm": 0.3903225806451613, + "acc_norm_stderr": 0.027751256636969573 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.031660988918880785, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.031660988918880785 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.030102793781791194, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.030102793781791194 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276612, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276612 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.023330654054535892, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.023330654054535892 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.02672003438051499, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.02672003438051499 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668767, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44403669724770645, + "acc_stderr": 0.021302621211654525, + "acc_norm": 0.44403669724770645, + "acc_norm_stderr": 0.021302621211654525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.02758281141515961, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.02758281141515961 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.019206606848825362, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.019206606848825362 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320196, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320196 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329882, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329882 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776125, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29595827900912647, + "acc_stderr": 0.011658518525277054, + "acc_norm": 0.29595827900912647, + 
"acc_norm_stderr": 0.011658518525277054 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237269, + "mc2": 0.4123953965733462, + "mc2_stderr": 0.014907127171846607 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45690672963400236, + "acc_stderr": 0.017126389093086784, + "acc_norm": 0.5667060212514758, + "acc_norm_stderr": 0.017036683641893098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/seal3.1.3_ia3", 
+ "model_sha": "9321128d80a13a06c07569136884d5c1bfa6fa66", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/seal3.1.6_ia3/result_2023-11-18 12:51:41.json b/jingyeom/seal3.1.6_ia3/result_2023-11-18 12:51:41.json new file mode 100644 index 0000000000000000000000000000000000000000..637ec5c0f2f5853dee75fcb30783e2fe66cd65fa --- /dev/null +++ b/jingyeom/seal3.1.6_ia3/result_2023-11-18 12:51:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979277, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633837 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41943835889265085, + "acc_stderr": 0.004924586362301654, + "acc_norm": 0.5659231228838877, + "acc_norm_stderr": 0.004946221512145287 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633938, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633938 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": 
{ + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.024939313906940774, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.024939313906940774 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.031937057262002924, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.031937057262002924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895992, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.02646611753895992 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0365634365335316, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0365634365335316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655816, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655816 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 
0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389177, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389177 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3545751633986928, + "acc_stderr": 0.019353360547553707, + "acc_norm": 0.3545751633986928, + "acc_norm_stderr": 0.019353360547553707 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626974, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.029886910547626974 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.02866199620233531, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.02866199620233531 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163909, + 
"acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163909 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31029986962190353, + "acc_stderr": 0.011815439293469829, + "acc_norm": 0.31029986962190353, + "acc_norm_stderr": 0.011815439293469829 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.40532931367786407, + "mc2_stderr": 0.014837785058995488 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4675324675324675, + "acc_stderr": 0.01715407371668286, + "acc_norm": 0.5548996458087367, + "acc_norm_stderr": 0.017086417431005464 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/seal3.1.6_ia3", + "model_sha": "d9e866e20d75db50cb1648ffc9b87a27761a13cc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/seal3.1.6n_7b/result_2023-11-16 00:30:31.json b/jingyeom/seal3.1.6n_7b/result_2023-11-16 00:30:31.json new file mode 100644 index 0000000000000000000000000000000000000000..9aab1e5196070df4a433ed426c1d281f601560f6 --- /dev/null +++ b/jingyeom/seal3.1.6n_7b/result_2023-11-16 00:30:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3310580204778157, + "acc_stderr": 0.013752062419817829, + "acc_norm": 0.4035836177474403, + "acc_norm_stderr": 0.014337158914268447 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40111531567416847, + "acc_stderr": 0.004891226138578063, + "acc_norm": 0.5273849830711014, + "acc_norm_stderr": 0.004982291744069922 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34355044699872284, + "acc_stderr": 0.016982145632652462, + "acc_norm": 0.34355044699872284, + "acc_norm_stderr": 0.016982145632652462 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031023, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.02679542232789395, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.02679542232789395 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291947, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291947 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + 
"acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.0394170763206489, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.0394170763206489 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.02835962087053395, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.02835962087053395 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.02199201666237056, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.02199201666237056 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.025284416114900156, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.025284416114900156 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3504273504273504, + "acc_stderr": 0.03125610824421881, + "acc_norm": 0.3504273504273504, + "acc_norm_stderr": 0.03125610824421881 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724057, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724057 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.031157150869355582, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.031157150869355582 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173043, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173043 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.1875, + "acc_stderr": 0.032639560491693344, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.032639560491693344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 
0.039427724440366234 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526502, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.02402774515526502 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.029778663037752954, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.029778663037752954 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3247706422018349, + "acc_stderr": 0.02007772910931033, + "acc_norm": 0.3247706422018349, + "acc_norm_stderr": 0.02007772910931033 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.12698412698412698, + "acc_stderr": 0.02978041752268843, + "acc_norm": 0.12698412698412698, + "acc_norm_stderr": 0.02978041752268843 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.025457756696667874, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.025457756696667874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.037827289808654706, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.037827289808654706 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.018433427649401892, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.018433427649401892 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.028353212866863448, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.028353212866863448 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144703, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144703 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.02768297952296023, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.02768297952296023 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.34177215189873417, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.34177215189873417, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803545, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803545 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.3886319562727079, + "mc2_stderr": 0.014918898169695423 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605973, + "acc_norm": 0.39787485242030696, + "acc_norm_stderr": 0.016827959054733388 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/seal3.1.6n_7b", + "model_sha": "637f6e9182b916de57bac9d80d166b4c7e961670", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/seal_all_13b/result_2023-12-04 11:39:35.json b/jingyeom/seal_all_13b/result_2023-12-04 11:39:35.json new file mode 100644 index 0000000000000000000000000000000000000000..eadeca57230ad946167fc736a3c0a41b12ffb18c --- /dev/null +++ b/jingyeom/seal_all_13b/result_2023-12-04 11:39:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40955631399317405, + "acc_stderr": 0.014370358632472434, + "acc_norm": 0.4616040955631399, + "acc_norm_stderr": 0.01456824555029636 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4246166102370046, + "acc_stderr": 0.004932745013072715, + "acc_norm": 0.5771758613821948, + "acc_norm_stderr": 0.004929983692795069 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534436, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534436 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461227, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883232, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883232 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507755, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507755 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712152, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712152 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.037038511930995215, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.037038511930995215 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261117, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261117 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169934, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169934 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.3194444444444444, + "acc_stderr": 0.0317987634217685, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.0317987634217685 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396584, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396584 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763128, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30182529335071706, + "acc_stderr": 0.011724350518105888, + "acc_norm": 0.30182529335071706, + "acc_norm_stderr": 0.011724350518105888 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22276621787025705, + "mc1_stderr": 0.014566506961396756, + "mc2": 0.3670812629713066, + "mc2_stderr": 0.014598520460295474 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31995277449822906, + "acc_stderr": 0.016037153840280552, + "acc_norm": 0.48288075560802834, + "acc_norm_stderr": 0.017180275246085626 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/seal_all_13b", + "model_sha": "3ad11ce5786df519f300ed28237e4337112ef01e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/seal_all_7b/result_2023-12-04 06:52:30.json b/jingyeom/seal_all_7b/result_2023-12-04 06:52:30.json new file mode 100644 index 0000000000000000000000000000000000000000..efee47cff8526f7740a2820d85ddd243660c8e09 --- /dev/null +++ b/jingyeom/seal_all_7b/result_2023-12-04 06:52:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33447098976109213, + "acc_stderr": 0.01378746032244138, + "acc_norm": 0.3924914675767918, + "acc_norm_stderr": 0.014269634635670726 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39075881298546106, + "acc_stderr": 0.004869232758103322, + "acc_norm": 0.5157339175463055, + "acc_norm_stderr": 0.004987310297290272 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457923, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457923 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34738186462324394, + "acc_stderr": 0.01702667174865573, + "acc_norm": 0.34738186462324394, + "acc_norm_stderr": 0.01702667174865573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745653, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745653 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 
0.0332939411907353, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.0332939411907353 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31511254019292606, + "acc_stderr": 0.026385273703464492, + "acc_norm": 0.31511254019292606, + "acc_norm_stderr": 0.026385273703464492 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.27802690582959644, + "acc_stderr": 0.030069584874494043, + "acc_norm": 0.27802690582959644, + "acc_norm_stderr": 0.030069584874494043 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.04039314978724561, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.04039314978724561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165085, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165085 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533084, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533084 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.035240689515674495, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.035240689515674495 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 0.021193632525148522, + "acc_norm": 0.22564102564102564, + "acc_norm_stderr": 0.021193632525148522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.046166311118017125, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.046166311118017125 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782426, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022884, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022884 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.03166098891888078, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.03166098891888078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.28679245283018867, + "acc_stderr": 0.027834912527544074, + "acc_norm": 0.28679245283018867, + "acc_norm_stderr": 0.027834912527544074 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + 
"acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153193, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153193 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.03119584087770029, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.03119584087770029 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28807339449541286, + "acc_stderr": 0.01941644589263602, + "acc_norm": 0.28807339449541286, + "acc_norm_stderr": 0.01941644589263602 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1349206349206349, + "acc_stderr": 0.030557101589417515, + "acc_norm": 0.1349206349206349, + "acc_norm_stderr": 0.030557101589417515 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.024954184324879905, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.024954184324879905 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053435, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 
0.018217269552053435 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590638, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590638 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.026991454502036733, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.026991454502036733 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1801470588235294, + "acc_stderr": 0.023345163616544855, + "acc_norm": 0.1801470588235294, + "acc_norm_stderr": 0.023345163616544855 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.02599111767281329, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.02599111767281329 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3206751054852321, + "acc_stderr": 0.03038193194999041, + "acc_norm": 0.3206751054852321, + "acc_norm_stderr": 0.03038193194999041 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2796610169491525, + "acc_stderr": 0.011463397393861947, + "acc_norm": 0.2796610169491525, + "acc_norm_stderr": 0.011463397393861947 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591362, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268049, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268049 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.21542227662178703, + "mc1_stderr": 0.014391902652427685, + "mc2": 0.3511785481321989, + "mc2_stderr": 0.014738072611274355 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252247, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.01645549600031453 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/seal_all_7b", + "model_sha": "d30240cec9e23bf26a7843a4b99e55a5f5c1d99d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/zephyr_all_7b/result_2023-12-05 04:35:39.json b/jingyeom/zephyr_all_7b/result_2023-12-05 04:35:39.json new file mode 100644 index 0000000000000000000000000000000000000000..9ebeec923f8c5d694b8cae081979deb7b009cd1c --- /dev/null +++ b/jingyeom/zephyr_all_7b/result_2023-12-05 04:35:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2986348122866894, + "acc_stderr": 0.01337407861506875, + "acc_norm": 0.3455631399317406, + "acc_norm_stderr": 0.013896938461145687 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3601872137024497, + "acc_stderr": 0.004790734683704582, + "acc_norm": 0.4553873730332603, + "acc_norm_stderr": 0.004969879532843085 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.03743979825926399, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.03743979825926399 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41762452107279696, + "acc_stderr": 0.017635637326951534, + "acc_norm": 0.41762452107279696, + "acc_norm_stderr": 0.017635637326951534 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + 
"acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40514469453376206, + "acc_stderr": 0.027882383791325953, + "acc_norm": 0.40514469453376206, + "acc_norm_stderr": 0.027882383791325953 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572234, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572234 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993178, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993178 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.02508830145469484, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.02508830145469484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.0302850092590098, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.0302850092590098 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.472636815920398, + "acc_stderr": 0.03530235517334683, + "acc_norm": 0.472636815920398, + "acc_norm_stderr": 0.03530235517334683 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920938, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920938 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.026074314851657083, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.026074314851657083 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3950617283950617, + "acc_stderr": 0.027201117666925657, + "acc_norm": 0.3950617283950617, + "acc_norm_stderr": 0.027201117666925657 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44036697247706424, + "acc_stderr": 0.02128431062376154, + "acc_norm": 0.44036697247706424, + "acc_norm_stderr": 0.02128431062376154 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.041349130183033156, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.041349130183033156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141114, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141114 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + 
"acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.018463154132632813, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.018463154132632813 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2994413407821229, + "acc_stderr": 0.015318257745976708, + "acc_norm": 0.2994413407821229, + "acc_norm_stderr": 0.015318257745976708 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.32653061224489793, + "acc_stderr": 0.03002105623844029, + "acc_norm": 0.32653061224489793, + "acc_norm_stderr": 0.03002105623844029 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.43037974683544306, + "acc_stderr": 0.032230171959375976, + "acc_norm": 0.43037974683544306, + "acc_norm_stderr": 0.032230171959375976 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28096479791395046, + "acc_stderr": 0.011479684550077692, + "acc_norm": 0.28096479791395046, + "acc_norm_stderr": 0.011479684550077692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.03343311240488419, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.03343311240488419 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 0.037425970438065864, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.037425970438065864 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.4515315923147205, + "mc2_stderr": 0.01639872227117553 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.01547327158398843, + "acc_norm": 0.29515938606847697, + "acc_norm_stderr": 0.01568153522919219 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/zephyr_all_7b", + "model_sha": "bedb59500978ac3964aa9aaf9bd9c73c74ee025d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-13b-n1/result_2023-10-29 14:25:55.json b/jiwoochris/ko-llama2-13b-n1/result_2023-10-29 14:25:55.json new file mode 100644 index 0000000000000000000000000000000000000000..6d6daf4181ffd23349cce76f5a628d53a1ac7686 --- /dev/null +++ b/jiwoochris/ko-llama2-13b-n1/result_2023-10-29 14:25:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39419795221843, + "acc_stderr": 0.01428052266746733, + "acc_norm": 0.44368600682593856, + "acc_norm_stderr": 0.01451842182567044 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4219279028082055, + "acc_stderr": 0.004928578106026375, + "acc_norm": 0.5553674566819359, + "acc_norm_stderr": 0.004959094146471523 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.03815827365913236, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.03815827365913236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 
0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5031928480204342, + "acc_stderr": 0.017879598945933085, + "acc_norm": 0.5031928480204342, + "acc_norm_stderr": 0.017879598945933085 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135778, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135778 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489426, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767776, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767776 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.033864057460620905, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.033864057460620905 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568385, + "acc_norm": 0.45161290322580644, + 
"acc_norm_stderr": 0.028310500348568385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.031256108244218796, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.031256108244218796 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833713, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833713 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099522, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099522 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142262, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569653, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569653 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362233, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362233 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 
0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033522, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033522 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.01885008469646872, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.01885008469646872 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320203, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320203 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176852, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176852 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.02679956202488769, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.02679956202488769 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.032006820201639065, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.032006820201639065 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.0325446201076786, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.0325446201076786 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29791395045632335, + "acc_stderr": 0.011680717340400031, + "acc_norm": 0.29791395045632335, + "acc_norm_stderr": 0.011680717340400031 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.034542365853806094, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.034542365853806094 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713613, + "mc2": 0.4716545169998464, + "mc2_stderr": 0.015260622885261386 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5147579693034239, + "acc_stderr": 0.01718286443499856, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.017107618859549346 + } + }, + "versions": 
{ + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-13b-n1", + "model_sha": "12d3e6b98013770e4f8c8d37ff76d6c9134b39be", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-13b-v4/result_2023-10-22 15:44:08.json b/jiwoochris/ko-llama2-13b-v4/result_2023-10-22 15:44:08.json new file mode 100644 index 0000000000000000000000000000000000000000..afb4ed0a8ec55654db30fdb94c460e1978bd3660 --- /dev/null +++ b/jiwoochris/ko-llama2-13b-v4/result_2023-10-22 15:44:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40017064846416384, + "acc_stderr": 0.014317197787809167, + "acc_norm": 0.4539249146757679, + "acc_norm_stderr": 0.014549221105171864 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.4224258115913165, + "acc_stderr": 0.004929361040558258, + "acc_norm": 0.5571599283011353, + "acc_norm_stderr": 0.004957068377516512 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49169859514687103, + "acc_stderr": 0.017877498991072008, + "acc_norm": 0.49169859514687103, + "acc_norm_stderr": 0.017877498991072008 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135778, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135778 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956278, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956278 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008732, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008732 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.02478431694215636, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.02478431694215636 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 
0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165894, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165894 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376882, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376882 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4421965317919075, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.4421965317919075, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442207, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442207 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 
0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48623853211009177, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.48623853211009177, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.018975427920507205, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.018975427920507205 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639875, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639875 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190735, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190735 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298825, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298825 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.026799562024887688, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.026799562024887688 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3089960886571056, + "acc_stderr": 0.011801729777239249, + "acc_norm": 0.3089960886571056, + "acc_norm_stderr": 0.011801729777239249 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589674, + "mc2": 0.4713625301918517, + "mc2_stderr": 0.015403994277020416 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4946871310507674, + "acc_stderr": 0.01718938362722969, + "acc_norm": 0.5525383707201889, + "acc_norm_stderr": 0.01709519030150058 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-13b-v4", + "model_sha": "a3773012adb9e13b9bd9b15634dfaeb18718c24d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/jiwoochris/ko-llama2-13b-v5/result_2023-10-24 16:25:31.json b/jiwoochris/ko-llama2-13b-v5/result_2023-10-24 16:25:31.json new file mode 100644 index 0000000000000000000000000000000000000000..fff32d4a288d182ab0fa138dca1accb4c7410a77 --- /dev/null +++ b/jiwoochris/ko-llama2-13b-v5/result_2023-10-24 16:25:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40273037542662116, + "acc_stderr": 0.014332236306790154, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42123083051185023, + "acc_stderr": 0.004927473370720142, + "acc_norm": 0.5584544911372237, + "acc_norm_stderr": 0.004955564650016176 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.017879948914431665, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.017879948914431665 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416828, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416828 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.035315058793591834, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.035315058793591834 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.3923076923076923, + "acc_stderr": 0.02475600038213094, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.02475600038213094 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507748, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507748 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.02418049716437689, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.02418049716437689 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.02677299065336183, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.02677299065336183 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 
0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272436, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48256880733944957, + "acc_stderr": 0.02142429187185315, + "acc_norm": 0.48256880733944957, + "acc_norm_stderr": 0.02142429187185315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724553, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639872, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639872 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329882, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329882 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.0273658611315138, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.0273658611315138 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 
0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849645, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849645 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4663029303420103, + "mc2_stderr": 0.015238838193243256 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5100354191263282, + "acc_stderr": 0.017186891286894056, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.017077254131556224 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-13b-v5", + "model_sha": "28f20014bc519440b6c16a65adf6545c1c9687b6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-13b-v6/result_2023-10-28 13:18:01.json b/jiwoochris/ko-llama2-13b-v6/result_2023-10-28 13:18:01.json new file mode 100644 index 0000000000000000000000000000000000000000..12677716637fc1a18660b5f0090ed3424a335675 --- /dev/null +++ b/jiwoochris/ko-llama2-13b-v6/result_2023-10-28 13:18:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938215, + "acc_norm": 0.45733788395904434, + "acc_norm_stderr": 0.014558106543924067 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4204341764588727, + "acc_stderr": 0.004926198483948701, + "acc_norm": 0.5538737303326031, + "acc_norm_stderr": 0.004960732382255241 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 0.017879248970584388, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.017879248970584388 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459157, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 
0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731837, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731837 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.024026846392873506, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.024026846392873506 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 
0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361826, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361826 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47155963302752296, + "acc_stderr": 0.021402615697348047, + "acc_norm": 0.47155963302752296, + "acc_norm_stderr": 0.021402615697348047 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3349673202614379, + "acc_stderr": 0.01909422816700032, + "acc_norm": 0.3349673202614379, + "acc_norm_stderr": 0.01909422816700032 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190735, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190735 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402543, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402543 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.02725720260611495, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.02725720260611495 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.03251215201141018, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.03251215201141018 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.011787910251664587, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.011787910251664587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4626627507088543, + "mc2_stderr": 0.015202563658823371 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.51357733175915, + "acc_stderr": 0.01718401506040145, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.01712282914329265 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 
1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-13b-v6", + "model_sha": "2606639bb18ca27586615693d937d41d1a756391", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-v1/result_2023-10-21 08:55:17.json b/jiwoochris/ko-llama2-v1/result_2023-10-21 08:55:17.json new file mode 100644 index 0000000000000000000000000000000000000000..88f2ca51e279159b37153ebfb989b7d458f455b6 --- /dev/null +++ b/jiwoochris/ko-llama2-v1/result_2023-10-21 08:55:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39419795221843, + "acc_stderr": 0.01428052266746733, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4217287392949612, + "acc_stderr": 0.004928263494616727, + "acc_norm": 0.5544712208723361, + "acc_norm_stderr": 0.004960082528852438 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.017879948914431662, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.017879948914431662 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419995, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419995 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": 
{ + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.02478431694215636, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.02478431694215636 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507748, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507748 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895992, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.02646611753895992 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0365634365335316, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0365634365335316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523864, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523864 + }, + "harness|ko_mmlu_college_biology|5": { + 
"acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361826, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361826 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033522, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033522 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33169934640522875, + "acc_stderr": 0.01904748523936038, + "acc_norm": 0.33169934640522875, + "acc_norm_stderr": 0.01904748523936038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.03070137211151092, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.03070137211151092 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 
0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2966101694915254, + "acc_stderr": 0.011665946586082854, + "acc_norm": 0.2966101694915254, + "acc_norm_stderr": 0.011665946586082854 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.4746429594651757, + "mc2_stderr": 0.01531218992321956 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5206611570247934, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 
1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-v1", + "model_sha": "4253098940413125f8f0847038c076d42e5b2c59", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-v2/result_2023-10-21 08:55:35.json b/jiwoochris/ko-llama2-v2/result_2023-10-21 08:55:35.json new file mode 100644 index 0000000000000000000000000000000000000000..54de9bb1e0b8f317ba408adc4ed746e2f1614c8e --- /dev/null +++ b/jiwoochris/ko-llama2-v2/result_2023-10-21 08:55:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955, + "acc_norm": 0.40187713310580203, + "acc_norm_stderr": 0.014327268614578273 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41276638119896436, + "acc_stderr": 0.004913253031155685, + "acc_norm": 0.5246962756423024, + "acc_norm_stderr": 0.004983691099110914 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.01778403453499246, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.01778403453499246 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.02798268045975956, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 
0.02798268045975956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102315, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4129032258064516, + "acc_stderr": 0.028009138125400387, + "acc_norm": 0.4129032258064516, + "acc_norm_stderr": 0.028009138125400387 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235897, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235897 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952166, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 
0.03516184772952166 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.02357760479165582, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.02357760479165582 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.026538189104705484, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.026538189104705484 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.027339546640662724, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.027339546640662724 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 0.021436420955529424, + "acc_norm": 0.4954128440366973, + "acc_norm_stderr": 0.021436420955529424 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138286, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138286 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.018521756215423027, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.018521756215423027 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + 
"acc": 0.15178571428571427, + "acc_stderr": 0.034057028381856924, + "acc_norm": 0.15178571428571427, + "acc_norm_stderr": 0.034057028381856924 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.031912820526692774, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.031912820526692774 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3070404172099087, + "acc_stderr": 0.011780959114513778, + "acc_norm": 0.3070404172099087, + "acc_norm_stderr": 0.011780959114513778 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589674, + "mc2": 0.47296551445370655, + "mc2_stderr": 0.016489115600580966 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5631641086186541, + "acc_stderr": 0.01705263355985607, + "acc_norm": 0.5855962219598583, + "acc_norm_stderr": 0.016936583383943615 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-v2", + "model_sha": "bfe6a2095cc43e82103cbdff36721810ef4057e3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-v3/result_2023-10-21 15:59:59.json b/jiwoochris/ko-llama2-v3/result_2023-10-21 15:59:59.json new file mode 100644 index 0000000000000000000000000000000000000000..0ef8bf12fb98ef853cf37ee57322b477166c83a1 --- /dev/null +++ b/jiwoochris/ko-llama2-v3/result_2023-10-21 15:59:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938169, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471625 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4231228838876718, + "acc_stderr": 0.0049304485271466575, + "acc_norm": 0.5584544911372237, + "acc_norm_stderr": 0.004955564650016177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.017879948914431665, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.017879948914431665 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + 
"acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484068, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.02834504586484068 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929187, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929187 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555498, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555498 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.03050329201334259, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.03050329201334259 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094527, 
+ "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094527 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.026720034380514995, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.026720034380514995 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194045, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 0.021436420955529424, + "acc_norm": 0.4954128440366973, + "acc_norm_stderr": 0.021436420955529424 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176647, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176647 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 
0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.018771683893528186, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.018771683893528186 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639872, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639872 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.0273658611315138, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.0273658611315138 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.032006820201639065, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.032006820201639065 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.0325446201076786, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.0325446201076786 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3089960886571056, + "acc_stderr": 0.011801729777239246, + "acc_norm": 0.3089960886571056, + "acc_norm_stderr": 0.011801729777239246 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589678, + "mc2": 0.46187837195291875, + "mc2_stderr": 0.015227305019069102 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.01718976703213082, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.017077254131556224 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-v3", + "model_sha": "277462786fe73ea1b6f50d5e45ee1be5854611a1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/llama2_cot-13b-v2/result_2023-11-15 06:20:20.json b/jiwoochris/llama2_cot-13b-v2/result_2023-11-15 06:20:20.json new file mode 100644 index 0000000000000000000000000000000000000000..a29c13327e3ddce6ab09f8396253134a7efd27d9 --- /dev/null +++ b/jiwoochris/llama2_cot-13b-v2/result_2023-11-15 06:20:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938217, + "acc_norm": 0.4453924914675768, + "acc_norm_stderr": 0.014523987638344076 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42123083051185023, + "acc_stderr": 0.004927473370720142, + "acc_norm": 0.5585540728938458, + "acc_norm_stderr": 0.0049554475646940545 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5031928480204342, + "acc_stderr": 0.017879598945933082, + 
"acc_norm": 0.5031928480204342, + "acc_norm_stderr": 0.017879598945933082 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.02483881198803315, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02483881198803315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568385, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112126, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112126 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41329479768786126, + "acc_stderr": 0.02651126136940924, + "acc_norm": 0.41329479768786126, + "acc_norm_stderr": 0.02651126136940924 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.02775653525734767, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.02775653525734767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + 
"acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.018771683893528183, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.018771683893528183 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639872, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639872 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025445, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.02714627193662517, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.02714627193662517 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2966101694915254, + "acc_stderr": 0.011665946586082849, + "acc_norm": 0.2966101694915254, + "acc_norm_stderr": 0.011665946586082849 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.01600265148736101, + "mc2": 0.4675588535923083, + "mc2_stderr": 0.01519834664408876 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.01718832921965428, + "acc_norm": 0.5230224321133412, + "acc_norm_stderr": 0.01717212154672763 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, 
+ "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/llama2_cot-13b-v2", + "model_sha": "7aa89f6afcd8a48eb30f8bc8a6013fb7eee105b0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/llama2_tmt-13b-v1/result_2023-11-02 08:59:31.json b/jiwoochris/llama2_tmt-13b-v1/result_2023-11-02 08:59:31.json new file mode 100644 index 0000000000000000000000000000000000000000..abfe5a2236cc82c10056c5ba6877c5f079155fe6 --- /dev/null +++ b/jiwoochris/llama2_tmt-13b-v1/result_2023-11-02 08:59:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38054607508532423, + "acc_stderr": 0.01418827771234981, + "acc_norm": 0.42918088737201365, + "acc_norm_stderr": 0.014464085894870657 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42342162915753834, + "acc_stderr": 0.004930911515084777, + "acc_norm": 0.5576578370842462, + "acc_norm_stderr": 0.0049564940598648966 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49680715197956576, + "acc_stderr": 0.017879598945933068, + "acc_norm": 0.49680715197956576, + "acc_norm_stderr": 0.017879598945933068 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416828, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416828 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40336134453781514, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.40336134453781514, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.024838811988033154, + "acc_norm": 0.4, + "acc_norm_stderr": 0.024838811988033154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + 
"acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165894, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165894 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.031660988918880785, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.031660988918880785 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730554, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730554 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261743, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261743 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194045, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.46788990825688076, + 
"acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249032, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249032 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.01866335967146367, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.01866335967146367 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022125, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022125 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.15178571428571427, + "acc_stderr": 0.03405702838185692, + "acc_norm": 0.15178571428571427, + "acc_norm_stderr": 0.03405702838185692 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536048, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536048 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.25735294117647056, + "acc_stderr": 0.02655651947004153, + "acc_norm": 0.25735294117647056, + "acc_norm_stderr": 0.02655651947004153 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4978902953586498, + "acc_stderr": 0.03254693801802007, + "acc_norm": 0.4978902953586498, + "acc_norm_stderr": 0.03254693801802007 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271815, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271815 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.471591282984116, + 
"mc2_stderr": 0.015363705899479043 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4675324675324675, + "acc_stderr": 0.017154073716682865, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.01718689128689406 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/llama2_tmt-13b-v1", + "model_sha": "19ab331bb4323f92cc3e5b17e3a7c2d629d5aa01", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/llama2_tmt-13b-v2/result_2023-11-07 12:17:35.json b/jiwoochris/llama2_tmt-13b-v2/result_2023-11-07 12:17:35.json new file mode 100644 index 0000000000000000000000000000000000000000..c546ba62476599fbecfd028f1ba371f1087c1f1f --- /dev/null +++ b/jiwoochris/llama2_tmt-13b-v2/result_2023-11-07 
12:17:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3771331058020478, + "acc_stderr": 0.014163366896192608, + "acc_norm": 0.4325938566552901, + "acc_norm_stderr": 0.014478005694182531 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4217287392949612, + "acc_stderr": 0.004928263494616727, + "acc_norm": 0.5551682931686915, + "acc_norm_stderr": 0.004959315198011164 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4942528735632184, + "acc_stderr": 0.01787878232612924, + "acc_norm": 0.4942528735632184, + "acc_norm_stderr": 0.01787878232612924 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478466, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478466 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.024838811988033158, + "acc_norm": 0.4, + "acc_norm_stderr": 0.024838811988033158 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + 
"acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895992, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.02646611753895992 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0365634365335316, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0365634365335316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655795, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.026720034380514995, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.026720034380514995 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456024, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.0276847214156562, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.0276847214156562 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39378238341968913, + "acc_stderr": 
0.03526077095548237, + "acc_norm": 0.39378238341968913, + "acc_norm_stderr": 0.03526077095548237 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.02140695268815158, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.02140695268815158 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31209150326797386, + "acc_stderr": 0.01874501120127766, + "acc_norm": 0.31209150326797386, + "acc_norm_stderr": 0.01874501120127766 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.03485946096475741, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.03485946096475741 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.026679252270103117, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.026679252270103117 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2894393741851369, + "acc_stderr": 0.011582659702210236, + "acc_norm": 0.2894393741851369, + "acc_norm_stderr": 0.011582659702210236 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + 
"acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4683789011362333, + "mc2_stderr": 0.0153748011102314 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49940968122786306, + "acc_stderr": 0.01719034212344866, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.01713648762604985 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/llama2_tmt-13b-v2", + "model_sha": "430d81fb7292f40407b87f989603131bf16f085a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/polyglot_350/result_2023-10-19 07:42:25.json b/jiwoochris/polyglot_350/result_2023-10-19 07:42:25.json new file mode 100644 index 0000000000000000000000000000000000000000..a0fa2a7beb7e74b2c8c3a3c81e1924d0046a1a1c --- /dev/null +++ b/jiwoochris/polyglot_350/result_2023-10-19 07:42:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28498293515358364, + "acc_stderr": 0.013191348179838792, + "acc_norm": 0.3174061433447099, + "acc_norm_stderr": 0.01360223908803817 + }, + "harness|ko_hellaswag|10": { + "acc": 0.371539533957379, + "acc_stderr": 0.004822286556305217, + "acc_norm": 0.4738099980083649, + "acc_norm_stderr": 0.004982931565945953 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.18128654970760233, + "acc_stderr": 0.029547741687640024, + "acc_norm": 0.18128654970760233, + "acc_norm_stderr": 0.029547741687640024 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.36893203883495146, + "acc_stderr": 0.047776151811567386, + "acc_norm": 0.36893203883495146, + "acc_norm_stderr": 0.047776151811567386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.014485656041669173, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.014485656041669173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2379421221864952, + "acc_stderr": 0.024185150647818707, + "acc_norm": 0.2379421221864952, + "acc_norm_stderr": 0.024185150647818707 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.15246636771300448, + "acc_stderr": 0.024126204813252863, + "acc_norm": 0.15246636771300448, + "acc_norm_stderr": 0.024126204813252863 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847836, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847836 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.033184773338453315, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.033184773338453315 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135303, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135303 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062947, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062947 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.03086868260412162, + 
"acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.03086868260412162 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.024359581465396983, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.024359581465396983 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.02704685763071668, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.02704685763071668 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493864, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493864 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577656, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577656 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.03351953879521271, + "acc_norm": 0.2392638036809816, + 
"acc_norm_stderr": 0.03351953879521271 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23765432098765432, + "acc_stderr": 0.023683591837008553, + "acc_norm": 0.23765432098765432, + "acc_norm_stderr": 0.023683591837008553 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3626943005181347, + "acc_stderr": 0.03469713791704372, + "acc_norm": 0.3626943005181347, + "acc_norm_stderr": 0.03469713791704372 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3541284403669725, + "acc_stderr": 0.020504729013829104, + "acc_norm": 0.3541284403669725, + "acc_norm_stderr": 0.020504729013829104 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.02582916327275748, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.02582916327275748 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036622, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036622 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902013, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902013 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.033953227263757976, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.033953227263757976 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.21940928270042195, + "acc_stderr": 0.026939106581553945, + "acc_norm": 0.21940928270042195, + "acc_norm_stderr": 0.026939106581553945 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178475, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350194, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350194 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253595, + "mc2": 0.40641374284087445, + "mc2_stderr": 0.014952562897051682 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2857142857142857, + "acc_stderr": 0.015531620786986736, + "acc_norm": 0.3246753246753247, + "acc_norm_stderr": 0.016098883939346463 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/polyglot_350", + "model_sha": "9fb5a66197344b0ec71467e384620bd610668339", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jjourney1125/M-SOLAR-10.7B-v1.0/result_2023-12-25 04:59:35.json b/jjourney1125/M-SOLAR-10.7B-v1.0/result_2023-12-25 04:59:35.json new file mode 100644 index 0000000000000000000000000000000000000000..222213d0d0b12301fa5f2c5f670893a17f13100d --- /dev/null +++ b/jjourney1125/M-SOLAR-10.7B-v1.0/result_2023-12-25 04:59:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4598976109215017, + "acc_stderr": 0.014564318856924848, + "acc_norm": 0.49573378839590443, + "acc_norm_stderr": 0.014610858923956952 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4403505277833101, + "acc_stderr": 0.004954146286513353, + "acc_norm": 0.6011750647281418, + "acc_norm_stderr": 0.004886559008754985 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6538952745849298, + "acc_stderr": 0.01701196526641208, + "acc_norm": 0.6538952745849298, + "acc_norm_stderr": 0.01701196526641208 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.032662042990646796, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.032662042990646796 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.594855305466238, + "acc_stderr": 0.02788238379132595, + "acc_norm": 0.594855305466238, + "acc_norm_stderr": 0.02788238379132595 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.043171711948702556, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786753, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786753 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.04858083574266345, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.04858083574266345 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.025124653525885103, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.025124653525885103 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.02786932057166462, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02786932057166462 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.026246772946890488, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.026246772946890488 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5622641509433962, + "acc_stderr": 0.03053333843046752, + "acc_norm": 0.5622641509433962, + "acc_norm_stderr": 0.03053333843046752 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465073, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465073 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.41721854304635764, + "acc_stderr": 0.04026141497634612, + "acc_norm": 0.41721854304635764, + "acc_norm_stderr": 0.04026141497634612 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.025670080636909193, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.025670080636909193 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + 
"acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.595679012345679, + "acc_stderr": 0.027306625297327688, + "acc_norm": 0.595679012345679, + "acc_norm_stderr": 0.027306625297327688 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.689908256880734, + "acc_stderr": 0.01983084968443975, + "acc_norm": 0.689908256880734, + "acc_norm_stderr": 0.01983084968443975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.020200164564804588, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.020200164564804588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.029189805673587105, + "acc_norm": 0.3971631205673759, + "acc_norm_stderr": 0.029189805673587105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21899441340782122, + "acc_stderr": 0.01383167668730321, + "acc_norm": 0.21899441340782122, + "acc_norm_stderr": 0.01383167668730321 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, 
+ "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4963235294117647, + "acc_stderr": 0.030372015885428188, + "acc_norm": 0.4963235294117647, + "acc_norm_stderr": 0.030372015885428188 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.03164209487942942, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.03164209487942942 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 0.02747974455080851, + "acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.02747974455080851 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4015645371577575, + "acc_stderr": 0.012520315120147132, + "acc_norm": 0.4015645371577575, + "acc_norm_stderr": 0.012520315120147132 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.016451264440068242, + "mc2": 0.49232422202678827, + "mc2_stderr": 0.0154380637666786 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5749704840613932, + "acc_stderr": 0.016996016308362887, + "acc_norm": 0.6221959858323495, + "acc_norm_stderr": 0.01666908284069498 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jjourney1125/M-SOLAR-10.7B-v1.0", + "model_sha": "744f2fd6625d908aa47f5adeaa127fd661198054", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jjourney1125/llama2-13b-v1/result_2023-12-22 04:35:39.json b/jjourney1125/llama2-13b-v1/result_2023-12-22 04:35:39.json new file mode 100644 index 0000000000000000000000000000000000000000..1a63504dd7ea8333bcb21511d7e3972751251a61 --- /dev/null +++ b/jjourney1125/llama2-13b-v1/result_2023-12-22 04:35:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38993174061433444, + "acc_stderr": 0.014252959848892894, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.01447113339264247 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4091814379605656, + "acc_stderr": 0.004906779523192673, + "acc_norm": 0.5419239195379406, + "acc_norm_stderr": 0.004972210244020565 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.01785041079438017, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.01785041079438017 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.031565646822367836, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.031565646822367836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 
0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.033764582465095665, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.033764582465095665 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 
0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717861, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717861 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281335, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281335 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5357798165137615, + "acc_stderr": 0.021382364775701906, + "acc_norm": 0.5357798165137615, + "acc_norm_stderr": 0.021382364775701906 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523811, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523811 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.02862441255016795, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.02862441255016795 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.040633027314866704, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.040633027314866704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.019659922493623343, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.019659922493623343 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293647, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293647 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.28156424581005585, + "acc_stderr": 0.015042290171866113, + "acc_norm": 0.28156424581005585, + "acc_norm_stderr": 0.015042290171866113 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.01184923429145932, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.01184923429145932 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.44709065910683965, + "mc2_stderr": 0.015327918263132853 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5430932703659976, + "acc_stderr": 0.017126389093086777, + "acc_norm": 0.5985832349468713, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jjourney1125/llama2-13b-v1", + "model_sha": "202ead34796d321bc87053c81cc88026f8cac022", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jjourney1125/llama2-dev/result_2023-12-16 13:31:05.json b/jjourney1125/llama2-dev/result_2023-12-16 13:31:05.json new file mode 100644 index 0000000000000000000000000000000000000000..0bdc6a8d489672f4ff1399c7674bbded8a757227 --- /dev/null +++ b/jjourney1125/llama2-dev/result_2023-12-16 13:31:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.013697432466693246, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.014194389086685272 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3543118900617407, + "acc_stderr": 0.0047732675101127406, + "acc_norm": 0.4435371439952201, + "acc_norm_stderr": 0.004957863944093121 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.017865944827291615, + "acc_norm": 0.48020434227330777, + "acc_norm_stderr": 0.017865944827291615 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, 
+ "acc_stderr": 0.02817391776176287, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.02817391776176287 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.03979236637497411, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.03979236637497411 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.02475600038213094, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.02475600038213094 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.031947400722655395, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.031947400722655395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961827, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 
0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02306818884826111, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02306818884826111 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272438, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272438 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44954128440366975, + "acc_stderr": 0.021327881417823373, + "acc_norm": 0.44954128440366975, + "acc_norm_stderr": 0.021327881417823373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259287, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3349673202614379, + "acc_stderr": 0.01909422816700031, + "acc_norm": 0.3349673202614379, + "acc_norm_stderr": 0.01909422816700031 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 
0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802747, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802747 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497722, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497722 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.363754889178618, + "acc_stderr": 0.012286991879902887, + "acc_norm": 0.363754889178618, + "acc_norm_stderr": 0.012286991879902887 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.444838685797901, + "mc2_stderr": 0.015532530203119514 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3742621015348288, + "acc_stderr": 0.016637917789798735, + "acc_norm": 0.4179456906729634, + "acc_norm_stderr": 0.016957292005279723 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jjourney1125/llama2-dev", + "model_sha": "66931bf246639e144dcd1e8b255a2222e210e2f0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jojo0217/ChatSKKU5.8B/result_2023-09-27 14:33:28.json b/jojo0217/ChatSKKU5.8B/result_2023-09-27 14:33:28.json new file mode 100644 index 0000000000000000000000000000000000000000..b7adbdeaed2eec53b2edde597bb693f434653094 --- /dev/null +++ b/jojo0217/ChatSKKU5.8B/result_2023-09-27 14:33:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28924914675767915, + "acc_stderr": 0.013250012579393443, + "acc_norm": 0.3293515358361775, + "acc_norm_stderr": 0.013734057652635474 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3754232224656443, + "acc_stderr": 0.004832423630593185, + "acc_norm": 0.48028281218880703, + "acc_norm_stderr": 0.004985900172317694 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.36893203883495146, + "acc_stderr": 0.04777615181156739, + "acc_norm": 0.36893203883495146, + "acc_norm_stderr": 0.04777615181156739 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20434227330779056, + "acc_stderr": 0.014419123980931904, + "acc_norm": 0.20434227330779056, + "acc_norm_stderr": 0.014419123980931904 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_conceptual_physics|5": { + 
"acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.030709824050565264, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.030709824050565264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.09865470852017937, + "acc_stderr": 0.020013729184919227, + "acc_norm": 0.09865470852017937, + "acc_norm_stderr": 0.020013729184919227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3383838383838384, + "acc_stderr": 0.033711241426263014, + "acc_norm": 0.3383838383838384, + "acc_norm_stderr": 0.033711241426263014 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36923076923076925, + "acc_stderr": 0.02446861524147891, + "acc_norm": 0.36923076923076925, + "acc_norm_stderr": 0.02446861524147891 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782426, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493864, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493864 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255168, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255168 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.031524391865554016, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.031524391865554016 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02201908001221789, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02201908001221789 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2191358024691358, + "acc_stderr": 0.023016705640262206, + "acc_norm": 0.2191358024691358, + "acc_norm_stderr": 0.023016705640262206 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3504587155963303, + "acc_stderr": 0.02045607759982446, + "acc_norm": 0.3504587155963303, + "acc_norm_stderr": 0.02045607759982446 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.026173908506718576, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.026173908506718576 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.21487603305785125, + "acc_stderr": 0.037494924487096966, + "acc_norm": 0.21487603305785125, + "acc_norm_stderr": 0.037494924487096966 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.03842498559395268, + 
"acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953776, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953776 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.010946570966348775, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.010946570966348775 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087298, + "mc2": 0.41570723548070315, + "mc2_stderr": 0.014870707305351522 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791248, + "acc_norm": 0.3435655253837072, + "acc_norm_stderr": 0.016327334806429138 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 
1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jojo0217/ChatSKKU5.8B", + "model_sha": "f10a62ae90265032a2fc60b9484a501028b539bb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jyoung105/KoR-Orca-Platypus-13B-neft/result_2023-10-23 17:13:22.json b/jyoung105/KoR-Orca-Platypus-13B-neft/result_2023-10-23 17:13:22.json new file mode 100644 index 0000000000000000000000000000000000000000..4774e48739fb62dd429d8fdcaf537b41b1aa5268 --- /dev/null +++ b/jyoung105/KoR-Orca-Platypus-13B-neft/result_2023-10-23 17:13:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955005, + "acc_norm": 0.40955631399317405, + "acc_norm_stderr": 0.014370358632472451 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4001194981079466, + "acc_stderr": 0.004889210628907952, + "acc_norm": 0.5324636526588329, + "acc_norm_stderr": 0.0049792529549773125 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 
0.017879248970584388, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.017879248970584388 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.0362933532994786, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.0362933532994786 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.02836504154256457, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.02836504154256457 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929187, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929187 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.024666744915187236, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.024666744915187236 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.02829205683011274, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.02829205683011274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5854700854700855, + "acc_stderr": 0.03227396567623779, + "acc_norm": 0.5854700854700855, + "acc_norm_stderr": 0.03227396567623779 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389174, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389174 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712173, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712173 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.035344398485395785, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.035344398485395785 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.026538189104705488, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.026538189104705488 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43486238532110094, + "acc_stderr": 0.02125463146560928, + "acc_norm": 0.43486238532110094, + "acc_norm_stderr": 0.02125463146560928 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.027996723180631455, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.027996723180631455 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092484, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092484 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534774, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534774 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.25, + "acc_stderr": 0.026303648393696036, + "acc_norm": 0.25, + "acc_norm_stderr": 0.026303648393696036 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.03113088039623593, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.03113088039623593 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.011862561755715931, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.011862561755715931 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.015905987048184828, + "mc2": 0.45413657999042506, + "mc2_stderr": 0.015074046336424325 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3990554899645809, + "acc_stderr": 0.016836377292849296, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.01718401506040146 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jyoung105/KoR-Orca-Platypus-13B-neft", + "model_sha": "a02ee5b06d952c0dc23f5868d59778638696ebfd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jyoung105/ko-platypus2-collective-13b/result_2023-10-14 09:02:59.json b/jyoung105/ko-platypus2-collective-13b/result_2023-10-14 09:02:59.json new file mode 100644 index 0000000000000000000000000000000000000000..a3ae825ed3857faec885fd37f5ce521d00f57798 --- /dev/null +++ b/jyoung105/ko-platypus2-collective-13b/result_2023-10-14 09:02:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3856655290102389, + "acc_stderr": 0.014224250973257182, + "acc_norm": 0.44283276450511944, + "acc_norm_stderr": 0.014515573873348906 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40798645688109936, + "acc_stderr": 0.004904561795919, + "acc_norm": 0.5428201553475404, + "acc_norm_stderr": 0.004971449552787176 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.017855434554042, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.017855434554042 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489424, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110655, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110655 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.032061837832361516, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.032061837832361516 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042328, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042328 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 
0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 
0.021387863350353985, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353985 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490437, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490437 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.019291961895066382, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.019291961895066382 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409146, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409146 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714864, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3728813559322034, + "acc_stderr": 0.012350630058333362, + "acc_norm": 0.3728813559322034, + "acc_norm_stderr": 0.012350630058333362 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 
0.015594753632006514, + "mc2": 0.4442744883801461, + "mc2_stderr": 0.015229595169585636 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.37662337662337664, + "acc_stderr": 0.01665879987405197, + "acc_norm": 0.42739079102715466, + "acc_norm_stderr": 0.01700812984482316 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jyoung105/ko-platypus2-collective-13b", + "model_sha": "a42bdc7082f08920ee23b5ed9946aa81008de332", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jyoung105/ko-platypus2-collective-13b_v1.1/result_2023-10-20 02:08:26.json b/jyoung105/ko-platypus2-collective-13b_v1.1/result_2023-10-20 02:08:26.json new file mode 100644 index 
0000000000000000000000000000000000000000..ae7762a74656e1664b3ee943f8e935558f239c7b --- /dev/null +++ b/jyoung105/ko-platypus2-collective-13b_v1.1/result_2023-10-20 02:08:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38054607508532423, + "acc_stderr": 0.014188277712349812, + "acc_norm": 0.4453924914675768, + "acc_norm_stderr": 0.014523987638344078 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4084843656642103, + "acc_stderr": 0.0049054894940050746, + "acc_norm": 0.5414260107548298, + "acc_norm_stderr": 0.0049726258487026555 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.017862091778507855, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.017862091778507855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.037117251907407486, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.037117251907407486 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562804, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562804 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.032061837832361516, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.032061837832361516 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.02508830145469484, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.02508830145469484 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.02832774309156106, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.02832774309156106 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641087, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641087 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756643, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756643 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194048, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194048 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + 
"acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569653, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569653 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353982, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353982 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.028408302020332687, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.028408302020332687 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483184, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483184 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3545751633986928, + "acc_stderr": 0.019353360547553704, + "acc_norm": 0.3545751633986928, + "acc_norm_stderr": 0.019353360547553704 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2659217877094972, + "acc_stderr": 0.014776765066438885, + "acc_norm": 0.2659217877094972, + "acc_norm_stderr": 0.014776765066438885 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37222946544980445, + "acc_stderr": 0.012346241297204368, + "acc_norm": 
0.37222946544980445, + "acc_norm_stderr": 0.012346241297204368 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4598250500026081, + "mc2_stderr": 0.01526561900775728 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3730814639905549, + "acc_stderr": 0.016627318275137432, + "acc_norm": 0.40731995277449823, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": 
"jyoung105/ko-platypus2-collective-13b_v1.1", + "model_sha": "ccb5e5262ad155e445b27ca11c6bb946f56fc4d1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kaist-ai/prometheus-13b-v1.0/result_2023-11-20 14:31:31.json b/kaist-ai/prometheus-13b-v1.0/result_2023-11-20 14:31:31.json new file mode 100644 index 0000000000000000000000000000000000000000..210f4bbf0f582cfd74015c011b21440a2e3c9f34 --- /dev/null +++ b/kaist-ai/prometheus-13b-v1.0/result_2023-11-20 14:31:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28668941979522183, + "acc_stderr": 0.013214986329274757, + "acc_norm": 0.32593856655290104, + "acc_norm_stderr": 0.013697432466693242 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3300139414459271, + "acc_stderr": 0.00469256765596177, + "acc_norm": 0.40021907986456884, + "acc_norm_stderr": 0.00488941312620877 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.03743979825926401, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.03743979825926401 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781169, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781169 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41507024265644954, + "acc_stderr": 0.017620137003655268, + "acc_norm": 0.41507024265644954, + "acc_norm_stderr": 0.017620137003655268 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610334, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610334 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031022, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3954983922829582, + "acc_stderr": 0.027770918531427838, + "acc_norm": 0.3954983922829582, + "acc_norm_stderr": 0.027770918531427838 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + 
"acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04750077341199986, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04750077341199986 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165897, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165897 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5769230769230769, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.5769230769230769, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585475, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585475 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.026483392042098177 + }, 
+ "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02622964917882116, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02622964917882116 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3412844036697248, + "acc_stderr": 0.020328612816592435, + "acc_norm": 0.3412844036697248, + "acc_norm_stderr": 0.020328612816592435 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.027732834353363944, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.027732834353363944 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.018311653053648222, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.018311653053648222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.26838235294117646, + "acc_stderr": 0.02691748122437725, + "acc_norm": 0.26838235294117646, + "acc_norm_stderr": 0.02691748122437725 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.42448979591836733, + "acc_stderr": 0.03164209487942941, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.03164209487942941 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.37130801687763715, + "acc_stderr": 0.031450686007448596, + "acc_norm": 0.37130801687763715, + "acc_norm_stderr": 0.031450686007448596 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048231, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048231 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.03713158067481913, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.03713158067481913 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361005, + "mc2": 0.4511958734873742, + "mc2_stderr": 0.016021768655782184 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3612750885478158, + "acc_stderr": 0.01651546302241201, + "acc_norm": 0.39433293978748524, + "acc_norm_stderr": 0.016802090674893206 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 
1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kaist-ai/prometheus-13b-v1.0", + "model_sha": "9088377314f91af4b48940e09a0c76d0878f5020", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kaist-ai/selfee-13b-delta/result_2023-11-20 14:29:53.json b/kaist-ai/selfee-13b-delta/result_2023-11-20 14:29:53.json new file mode 100644 index 0000000000000000000000000000000000000000..a2c5544cdd30dca2dddbc6037214f2ea38e05c67 --- /dev/null +++ b/kaist-ai/selfee-13b-delta/result_2023-11-20 14:29:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2098976109215017, + "acc_stderr": 0.011900548748047452, + "acc_norm": 0.2568259385665529, + "acc_norm_stderr": 0.012766923794116796 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2517426807408883, + "acc_stderr": 0.004331271717773851, + "acc_norm": 0.24307906791475803, + "acc_norm_stderr": 0.004280658234718773 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 
0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02860595370200424, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + 
"acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803627, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803627 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.031546980450822305, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.031546980450822305 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612378984, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612378984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 
0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22766217870257038, + "mc1_stderr": 0.014679255032111066, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07674144037780402, + "acc_stderr": 0.009151482698827047, + "acc_norm": 0.1959858323494687, + "acc_norm_stderr": 0.013647685567768866 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kaist-ai/selfee-13b-delta", + "model_sha": "d3d65ca5e956a520c65bbdf9cf060f8d88b3a687", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kakaobrain/kogpt/result_2023-11-07 09:38:44.json b/kakaobrain/kogpt/result_2023-11-07 09:38:44.json new file mode 100644 index 0000000000000000000000000000000000000000..d3f041c4b14dd005cbc78e2a1e925b707ea3eccd --- /dev/null +++ b/kakaobrain/kogpt/result_2023-11-07 09:38:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2568259385665529, + "acc_stderr": 0.0127669237941168, + "acc_norm": 0.3225255972696246, + "acc_norm_stderr": 0.013659980894277366 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3487353116908982, + "acc_stderr": 0.004755960559929155, + "acc_norm": 0.4329814777932683, + "acc_norm_stderr": 0.004944755230598386 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822585, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822585 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26053639846743293, + "acc_stderr": 0.01569600856380709, + "acc_norm": 0.26053639846743293, + "acc_norm_stderr": 0.01569600856380709 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292323, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292323 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553029, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553029 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291954, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291954 
+ }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836555, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087764, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087764 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.023901157979402527, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.023901157979402527 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21674876847290642, + "acc_stderr": 0.028990331252516235, + "acc_norm": 0.21674876847290642, + "acc_norm_stderr": 0.028990331252516235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.025822106119415898, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.025822106119415898 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02934311479809446, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02934311479809446 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.02590789712240817, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.02590789712240817 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724138, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724138 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696545, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696545 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213322, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213322 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21693121693121692, + "acc_stderr": 0.021227082449445045, + "acc_norm": 0.21693121693121692, + "acc_norm_stderr": 0.021227082449445045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22254335260115607, + "acc_stderr": 0.02239421566194282, + "acc_norm": 0.22254335260115607, + "acc_norm_stderr": 0.02239421566194282 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.03559039531617342, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.03559039531617342 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.024659685185967284, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.024659685185967284 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32642487046632124, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.32642487046632124, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25871559633027524, + "acc_stderr": 0.01877605231961962, + "acc_norm": 0.25871559633027524, + "acc_norm_stderr": 0.01877605231961962 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.02573885479781874, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.02573885479781874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.017282760695167404, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.017282760695167404 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290403, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.0356236785009539, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.0356236785009539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + 
"acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364552, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364552 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.011005971399927227, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.011005971399927227 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501943, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501943 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087298, + "mc2": 0.42031551863421324, + "mc2_stderr": 0.01497080980038926 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346453, + "acc_norm": 0.39315230224321135, + "acc_norm_stderr": 0.01679326280128708 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kakaobrain/kogpt", + "model_sha": "9abbe61d9e8d51adc4986e7725a1851e2264d4ff", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kfkas/Llama-2-ko-7b-Chat/result_2023-09-27 05:00:55.json b/kfkas/Llama-2-ko-7b-Chat/result_2023-09-27 05:00:55.json new file mode 100644 index 0000000000000000000000000000000000000000..517a58da1c8600f851d65243c7fcecd91d6206c6 --- /dev/null +++ b/kfkas/Llama-2-ko-7b-Chat/result_2023-09-27 05:00:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.013697432466693242, + "acc_norm": 0.3839590443686007, + "acc_norm_stderr": 0.01421244498065189 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3875721967735511, + "acc_stderr": 0.004862003566798545, + "acc_norm": 0.504779924317865, + "acc_norm_stderr": 0.004989553396413091 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.033773102522091945, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.033773102522091945 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3499361430395913, + "acc_stderr": 0.017055679797150423, + "acc_norm": 0.3499361430395913, + "acc_norm_stderr": 0.017055679797150423 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135778, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135778 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 
0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.02592237178881879, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.02592237178881879 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.032443052830087304 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3787878787878788, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.3787878787878788, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774632, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774632 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.028657491285071966, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.028657491285071966 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2743589743589744, + "acc_stderr": 0.022622765767493197, + "acc_norm": 0.2743589743589744, + "acc_norm_stderr": 0.022622765767493197 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.02652270967466777, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.02652270967466777 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.030882736974138653, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32075471698113206, + "acc_stderr": 0.028727502957880267, + "acc_norm": 0.32075471698113206, + "acc_norm_stderr": 0.028727502957880267 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.02549753263960955, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.02549753263960955 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 
0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.02241804289111394, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.02241804289111394 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.03309615177059006, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.03309615177059006 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388677003, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388677003 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868052, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868052 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3688073394495413, + "acc_stderr": 0.02068622756072955, + "acc_norm": 0.3688073394495413, + "acc_norm_stderr": 0.02068622756072955 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604672, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604672 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.027121956071388852, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.027121956071388852 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.038035102483515854 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + 
"acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257017, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257017 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.02866685779027465, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.02866685779027465 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26988265971316816, + "acc_stderr": 0.011337381084250411, + "acc_norm": 0.26988265971316816, + "acc_norm_stderr": 0.011337381084250411 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.034277431758165236, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.034277431758165236 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731613, + "mc2": 0.3670922997204656, + "mc2_stderr": 0.014677148528936845 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2361275088547816, + "acc_stderr": 0.014601536093324388, + "acc_norm": 0.3116883116883117, + "acc_norm_stderr": 0.01592456760735833 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kfkas/Llama-2-ko-7b-Chat", + "model_sha": "6d94c8e5b34fb09e80601548761a8dbd54bc0bba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kfkas/llama-2-koen-13b-SFT-LoRA-4bit-re/result_2023-12-03 08:29:51.json b/kfkas/llama-2-koen-13b-SFT-LoRA-4bit-re/result_2023-12-03 08:29:51.json new file mode 100644 index 0000000000000000000000000000000000000000..31c0cb89c64b9ea679cc2ec41663a1c898084b9c --- /dev/null +++ b/kfkas/llama-2-koen-13b-SFT-LoRA-4bit-re/result_2023-12-03 08:29:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3779863481228669, + "acc_stderr": 0.014169664520303096, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137998 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41884086835291773, + "acc_stderr": 0.004923609207861537, + "acc_norm": 0.5644293965345548, + "acc_norm_stderr": 0.00494818136702496 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.017855434554041996, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.017855434554041996 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 
0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105908, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105908 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4494949494949495, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.4494949494949495, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.02435958146539698, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.02435958146539698 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.0332085274234831, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.0332085274234831 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.027906150826041143, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.027906150826041143 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.5598290598290598, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.03005258057955784, + "acc_norm": 0.39245283018867927, + 
"acc_norm_stderr": 0.03005258057955784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.47761194029850745, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.47761194029850745, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730575, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.026564178111422615, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.026564178111422615 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44036697247706424, + "acc_stderr": 0.02128431062376155, + "acc_norm": 0.44036697247706424, + "acc_norm_stderr": 0.02128431062376155 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, 
+ "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486634, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.01952431674486634 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101366, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101366 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079103, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.02896370257079103 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682485, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682485 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2979591836734694, + "acc_stderr": 0.029279567411065674, + "acc_norm": 0.2979591836734694, + "acc_norm_stderr": 0.029279567411065674 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2953063885267275, + "acc_stderr": 0.011651061936208828, + "acc_norm": 0.2953063885267275, + "acc_norm_stderr": 0.011651061936208828 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904718, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904718 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.039036986477484416, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.039036986477484416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394816, + "mc2": 0.4378033526867641, + "mc2_stderr": 0.014971475297462514 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4592680047225502, + "acc_stderr": 0.017133218276537673, + "acc_norm": 0.5336481700118064, + "acc_norm_stderr": 0.017151384117131862 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kfkas/llama-2-koen-13b-SFT-LoRA-4bit-re", + "model_sha": "c7993d169e2a2b9b7615212d1684daa76c4cb7e7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kfkas/llama-2-koen-13b-SFT-LoRA-4bit/result_2023-11-30 12:50:03.json b/kfkas/llama-2-koen-13b-SFT-LoRA-4bit/result_2023-11-30 12:50:03.json new file mode 100644 index 0000000000000000000000000000000000000000..53075e4e16b4cd56fa22704ac72f3bfe856a850d --- /dev/null +++ b/kfkas/llama-2-koen-13b-SFT-LoRA-4bit/result_2023-11-30 12:50:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.01415063143511173, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137998 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4176458872734515, + "acc_stderr": 0.004921632645102382, + "acc_norm": 0.5644293965345548, + "acc_norm_stderr": 0.00494818136702496 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.038295098689947266, + "acc_norm": 0.5263157894736842, + 
"acc_norm_stderr": 0.038295098689947266 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5325670498084292, + "acc_stderr": 0.017841995750520874, + "acc_norm": 0.5325670498084292, + "acc_norm_stderr": 0.017841995750520874 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.037932811853078105, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.037932811853078105 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3967741935483871, + "acc_stderr": 0.027831231605767955, + "acc_norm": 0.3967741935483871, + "acc_norm_stderr": 0.027831231605767955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5683760683760684, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.5683760683760684, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399419, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399419 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.02811092849280907, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.02811092849280907 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.019576953122088844, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.019576953122088844 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005344, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005344 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670736, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670736 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.011787910251664587, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.011787910251664587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826824, + "mc2": 0.41284235007226594, + "mc2_stderr": 0.014696062981475876 + }, + "harness|ko_commongen_v2|2": { + "acc": 
0.44037780401416765, + "acc_stderr": 0.017067699774312984, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kfkas/llama-2-koen-13b-SFT-LoRA-4bit", + "model_sha": "e87f9090850cc807f722327cf888243761183090", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kfkas/my_test_LLM/result_2023-11-29 09:44:36.json b/kfkas/my_test_LLM/result_2023-11-29 09:44:36.json new file mode 100644 index 0000000000000000000000000000000000000000..7330634c056e9605a1ed12169d502b3b5cea8abe --- /dev/null +++ b/kfkas/my_test_LLM/result_2023-11-29 09:44:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.44368600682593856, + "acc_norm_stderr": 0.01451842182567044 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4206333399721171, + "acc_stderr": 0.004926518439372262, + "acc_norm": 0.5655247958573989, + "acc_norm_stderr": 0.004946748608271349 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.01782913176428718, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.01782913176428718 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796375, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796375 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.02478431694215636, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.02478431694215636 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + 
"acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119994, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119994 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.026720034380514995, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.026720034380514995 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607704, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 
+ }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489359, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489359 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5284403669724771, + "acc_stderr": 0.02140261569734804, + "acc_norm": 0.5284403669724771, + "acc_norm_stderr": 0.02140261569734804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510467998, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510467998 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.01962744474841224, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.01962744474841224 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755805, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755805 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.032259413526312945, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.032259413526312945 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330371, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330371 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + 
}, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.45020017328432316, + "mc2_stderr": 0.014998933368831563 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4781582054309327, + "acc_stderr": 0.017173944474294375, + "acc_norm": 0.5808736717827627, + "acc_norm_stderr": 0.016963995010862796 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kfkas/my_test_LLM", + "model_sha": "5457da887248f0209222ef3507e367f0b01cec9a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline 
at end of file diff --git a/kfkas/yi-ko-SFT-LoRA/result_2023-12-04 02:30:47.json b/kfkas/yi-ko-SFT-LoRA/result_2023-12-04 02:30:47.json new file mode 100644 index 0000000000000000000000000000000000000000..b02311c765090529c5f215f5c4e18b8639591450 --- /dev/null +++ b/kfkas/yi-ko-SFT-LoRA/result_2023-12-04 02:30:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3464163822525597, + "acc_stderr": 0.013905011180063246, + "acc_norm": 0.4104095563139932, + "acc_norm_stderr": 0.014374922192642664 + }, + "harness|ko_hellaswag|10": { + "acc": 0.398725353515236, + "acc_stderr": 0.004886353563571854, + "acc_norm": 0.5309699263095001, + "acc_norm_stderr": 0.004980200451851671 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5670498084291188, + "acc_stderr": 0.017718469101513982, + "acc_norm": 0.5670498084291188, + "acc_norm_stderr": 0.017718469101513982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016337, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016337 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.41721854304635764, + "acc_stderr": 0.04026141497634611, + "acc_norm": 0.41721854304635764, + "acc_norm_stderr": 0.04026141497634611 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.0350349092367328, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.0350349092367328 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.041124909746707884, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.041124909746707884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + 
"acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.03555300319557669, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.03555300319557669 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5981651376146789, + "acc_stderr": 0.02102010617299701, + "acc_norm": 0.5981651376146789, + "acc_norm_stderr": 0.02102010617299701 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626564, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626564 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375383, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861131, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861131 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468636, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468636 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928006, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928006 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + 
"acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3324641460234681, + "acc_stderr": 0.01203202233226052, + "acc_norm": 0.3324641460234681, + "acc_norm_stderr": 0.01203202233226052 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087305, + "mc2": 0.40810248555867834, + "mc2_stderr": 0.014955020241849247 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5088547815820543, + "acc_stderr": 0.01718765819933674, + "acc_norm": 0.6009445100354192, + "acc_norm_stderr": 0.016836377292849307 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kfkas/yi-ko-SFT-LoRA", + "model_sha": "8d8dffd8fd95b885d46d337d4d2a46319b76dde7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kiyoonyoo/ko-en-trans-platypus-13b-v2/result_2023-10-20 01:21:48.json b/kiyoonyoo/ko-en-trans-platypus-13b-v2/result_2023-10-20 01:21:48.json new file mode 100644 index 0000000000000000000000000000000000000000..6653ce0b4ec9f7ba5299f280e3f9e27bb2630c67 --- /dev/null +++ b/kiyoonyoo/ko-en-trans-platypus-13b-v2/result_2023-10-20 01:21:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.01407722310847014, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.014430197069326021 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4064927305317666, + "acc_stderr": 0.004901747426331732, + "acc_norm": 0.5430193188607847, + "acc_norm_stderr": 0.004971278309204197 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.017865944827291615, + "acc_norm": 0.48020434227330777, + "acc_norm_stderr": 0.017865944827291615 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840684, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840684 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + 
"acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347354, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347354 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068642, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068642 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + 
"acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142262, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.018521756215423027, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.018521756215423027 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + 
}, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.02667925227010311, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.02667925227010311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.011717148751648435, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.011717148751648435 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.40735838259254725, + "mc2_stderr": 0.0148987552825206 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4557260920897285, + "acc_stderr": 0.017122829143292648, + "acc_norm": 0.5194805194805194, + "acc_norm_stderr": 0.017177301992342544 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kiyoonyoo/ko-en-trans-platypus-13b-v2", + "model_sha": "d050d876d84bdce99f417f180479586cf0fe8a86", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kiyoonyoo/ko-en-trans-platypus-13b-v3/result_2023-10-22 06:24:03.json b/kiyoonyoo/ko-en-trans-platypus-13b-v3/result_2023-10-22 06:24:03.json new file mode 100644 index 0000000000000000000000000000000000000000..41adb4f98b5507787c4cde57d0d7c8e08609d0ca --- /dev/null +++ b/kiyoonyoo/ko-en-trans-platypus-13b-v3/result_2023-10-22 06:24:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37457337883959047, + "acc_stderr": 0.014144193471893456, + "acc_norm": 0.43430034129692835, + "acc_norm_stderr": 0.014484703048857355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4070902210714997, + "acc_stderr": 0.004902878806733046, + "acc_norm": 0.5408285202150966, + "acc_norm_stderr": 0.004973117975062488 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5197956577266922, + "acc_stderr": 0.017865944827291633, + "acc_norm": 0.5197956577266922, + "acc_norm_stderr": 0.017865944827291633 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564584, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564584 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 
0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236153, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236153 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102318, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102318 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969566, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836914, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836914 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176095, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176095 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112126, + "acc_norm": 
0.30687830687830686, + "acc_norm_stderr": 0.023752928712112126 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.02690290045866664, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.02690290045866664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5394495412844037, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.5394495412844037, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223974, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223974 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952688, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952688 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 
0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003203, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003203 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35919165580182527, + "acc_stderr": 0.012253386187584245, + "acc_norm": 0.35919165580182527, + "acc_norm_stderr": 0.012253386187584245 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4457475184349363, + "mc2_stderr": 0.015091782961916999 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38134592680047225, + "acc_stderr": 0.016699301768828074, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191392 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kiyoonyoo/ko-en-trans-platypus-13b-v3", + "model_sha": "3c27d710886ff8b6a9fcf321fae0e2f76eaeafa3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kiyoonyoo/ko-en-trans-platypus-13b/result_2023-10-18 00:15:42.json b/kiyoonyoo/ko-en-trans-platypus-13b/result_2023-10-18 00:15:42.json new file mode 100644 index 0000000000000000000000000000000000000000..9b68f6e1c61525954b9517e5fd8e7e2743ca1c51 --- /dev/null +++ b/kiyoonyoo/ko-en-trans-platypus-13b/result_2023-10-18 00:15:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.01407722310847014, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.014430197069326021 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4064927305317666, + "acc_stderr": 0.004901747426331732, + "acc_norm": 0.5430193188607847, + "acc_norm_stderr": 0.004971278309204197 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.017865944827291615, + "acc_norm": 0.48020434227330777, + "acc_norm_stderr": 0.017865944827291615 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 
0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840684, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840684 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347354, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347354 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 
+ }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068642, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068642 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142262, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.018521756215423027, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.018521756215423027 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.02667925227010311, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.02667925227010311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.011717148751648435, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.011717148751648435 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.40735838259254725, + "mc2_stderr": 0.0148987552825206 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4557260920897285, + "acc_stderr": 0.017122829143292648, + "acc_norm": 0.5194805194805194, + "acc_norm_stderr": 0.017177301992342544 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kiyoonyoo/ko-en-trans-platypus-13b", + "model_sha": "a211ce8adabfe436e59735081efe813176a88e7b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kiyoonyoo/ko-platypus-13b-control/result_2023-10-17 01:13:41.json b/kiyoonyoo/ko-platypus-13b-control/result_2023-10-17 01:13:41.json new file mode 100644 index 0000000000000000000000000000000000000000..c163d0068d7075a4ba12ba81709cd73b3b543a74 --- /dev/null +++ b/kiyoonyoo/ko-platypus-13b-control/result_2023-10-17 01:13:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38822525597269625, + "acc_stderr": 0.01424161420741405, + "acc_norm": 0.4283276450511945, + "acc_norm_stderr": 0.01446049636759902 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40360485958972314, + "acc_stderr": 0.004896173035943316, + "acc_norm": 0.5388368850826528, + "acc_norm_stderr": 0.0049747064284342835 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5300127713920817, + "acc_stderr": 0.01784772308664907, + "acc_norm": 0.5300127713920817, + "acc_norm_stderr": 0.01784772308664907 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745667, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745667 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419034, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419034 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.02518914989476419, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.02518914989476419 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836918, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836918 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776292, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 
0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622841, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622841 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379414, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379414 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.02138786335035399, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.02138786335035399 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.019450768432505518, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.019450768432505518 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509317, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37614080834419816, + "acc_stderr": 0.012372214430599819, + "acc_norm": 0.37614080834419816, + "acc_norm_stderr": 0.012372214430599819 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4343237644069022, + "mc2_stderr": 0.015029108040608447 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3872491145218418, + "acc_stderr": 0.016747577991642792, + "acc_norm": 0.4628099173553719, + "acc_norm_stderr": 0.0171427361176433 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 
1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kiyoonyoo/ko-platypus-13b-control", + "model_sha": "6cdc49b0713c6d4ad656fe98f5be7eccb1d8b4ef", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.12/result_2023-10-19 23:41:08.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.12/result_2023-10-19 23:41:08.json new file mode 100644 index 0000000000000000000000000000000000000000..6ecbe81f82c405e0f188f76862f91ce7b2a39cc3 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.12/result_2023-10-19 23:41:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938167, + "acc_norm": 0.45307167235494883, + "acc_norm_stderr": 0.014546892052005628 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40450109539932283, + "acc_stderr": 0.004897921845492103, + "acc_norm": 0.5380402310296754, + "acc_norm_stderr": 0.004975319435777095 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917056, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729245, + "acc_norm": 0.2912621359223301, + 
"acc_norm_stderr": 0.044986763205729245 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.016857391247472552, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.016857391247472552 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.028020226271200217, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.028020226271200217 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3762057877813505, + "acc_stderr": 0.027513925683549427, + "acc_norm": 0.3762057877813505, + "acc_norm_stderr": 0.027513925683549427 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.041184385658062976 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786751, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786751 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533953, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533953 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24615384615384617, + "acc_stderr": 0.021840866990423088, + "acc_norm": 0.24615384615384617, + "acc_norm_stderr": 0.021840866990423088 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3387096774193548, + "acc_stderr": 0.02692344605930286, + "acc_norm": 0.3387096774193548, + "acc_norm_stderr": 0.02692344605930286 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03255326307272486, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03255326307272486 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.02661648298050172, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.02661648298050172 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946458, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946458 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.46766169154228854, + "acc_stderr": 0.035281314729336065, + "acc_norm": 0.46766169154228854, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.034355680560478746, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.034355680560478746 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3901734104046243, + "acc_stderr": 0.026261677607806636, + "acc_norm": 0.3901734104046243, + "acc_norm_stderr": 0.026261677607806636 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.037466683254700206, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.037466683254700206 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.02640614597362566, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.02640614597362566 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32124352331606215, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.32124352331606215, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30825688073394497, + "acc_stderr": 0.019798366698367268, + "acc_norm": 0.30825688073394497, + "acc_norm_stderr": 0.019798366698367268 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.026716118380156827, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.026716118380156827 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319771, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319771 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.01885008469646872, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.01885008469646872 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340461, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340461 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.037709700493470194, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.037709700493470194 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372948, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372948 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2324022346368715, + "acc_stderr": 0.014125968754673385, + "acc_norm": 0.2324022346368715, + "acc_norm_stderr": 0.014125968754673385 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20955882352941177, + "acc_stderr": 0.02472311040767705, + "acc_norm": 0.20955882352941177, + "acc_norm_stderr": 0.02472311040767705 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.028263889943784617, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.028263889943784617 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4472573839662447, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.4472573839662447, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30182529335071706, + "acc_stderr": 0.01172435051810589, + "acc_norm": 0.30182529335071706, + "acc_norm_stderr": 0.01172435051810589 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.03374499356319354, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.03374499356319354 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321323002, + "mc2": 0.47008540499028884, + "mc2_stderr": 0.015171096468571796 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40968122786304606, + "acc_stderr": 0.016907568192219474, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.12", + "model_sha": "26d17aadd76e28b7226c206d1e5517b703b540fb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.14/result_2023-10-22 01:18:39.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.14/result_2023-10-22 01:18:39.json new file mode 100644 index 0000000000000000000000000000000000000000..3747e35f1a232e1b590ddb57f95746c4ff315ec3 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.14/result_2023-10-22 01:18:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3779863481228669, + "acc_stderr": 0.014169664520303096, + "acc_norm": 0.4325938566552901, + 
"acc_norm_stderr": 0.014478005694182531 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4039036048595897, + "acc_stderr": 0.004896757857022551, + "acc_norm": 0.5393347938657638, + "acc_norm_stderr": 0.004974316807920405 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.049224241534589326, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.049224241534589326 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4495530012771392, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.4495530012771392, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484068, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.02834504586484068 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621502, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621502 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.03996629574876719, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.03996629574876719 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.025088301454694834, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.025088301454694834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + 
"acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33004926108374383, + "acc_stderr": 0.03308530426228258, + "acc_norm": 0.33004926108374383, + "acc_norm_stderr": 0.03308530426228258 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.02807158890109185, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.02807158890109185 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5854700854700855, + "acc_stderr": 0.03227396567623779, + "acc_norm": 0.5854700854700855, + "acc_norm_stderr": 0.03227396567623779 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871927, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871927 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.03528131472933607, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.03528131472933607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.407514450867052, + "acc_stderr": 0.026454578146931498, + "acc_norm": 0.407514450867052, + "acc_norm_stderr": 0.026454578146931498 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.02743162372241502, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.02743162372241502 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.0193733324207245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.0193733324207245 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534785, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534785 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.033622774366080424, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.033622774366080424 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.01428834380392531, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.01428834380392531 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682485, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682485 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.011952840809646563, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.011952840809646563 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950234, + "mc2": 0.44508082063982635, + "mc2_stderr": 0.014978253495446162 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40968122786304606, + "acc_stderr": 0.01690756819221947, + "acc_norm": 0.5112160566706021, + "acc_norm_stderr": 0.01718602846948929 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.14", + "model_sha": "06b824795d8f7b9efa5cbe1c3a7b21e7c939bf8b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + 
} +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.2/result_2023-10-23 03:57:31.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.2/result_2023-10-23 03:57:31.json new file mode 100644 index 0000000000000000000000000000000000000000..1dd92a3ca437520edcfbb09accf7f18bcfd90286 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.2/result_2023-10-23 03:57:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407166, + "acc_norm": 0.4445392491467577, + "acc_norm_stderr": 0.014521226405627074 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40679147580163316, + "acc_stderr": 0.004902314055725591, + "acc_norm": 0.5413264289982075, + "acc_norm_stderr": 0.004972708369656543 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2681992337164751, + "acc_stderr": 0.01584243083526942, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.01584243083526942 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.026355158413349417, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.026355158413349417 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.22289156626506024, + "acc_stderr": 0.032400048255946876, + "acc_norm": 0.22289156626506024, + "acc_norm_stderr": 0.032400048255946876 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.02679542232789394, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.02679542232789394 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.030216831011508755, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.030216831011508755 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596917, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596917 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932026, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932026 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.03618664819936246, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936246 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380558, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 
0.027886828078380558 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.02102067268082791, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.02102067268082791 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3548387096774194, + "acc_stderr": 0.027218889773308753, + "acc_norm": 0.3548387096774194, + "acc_norm_stderr": 0.027218889773308753 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.030882736974138653, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.02461829819586651, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02461829819586651 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.34328358208955223, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.34328358208955223, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.13, + "acc_stderr": 0.0337997668989631, + "acc_norm": 0.13, + "acc_norm_stderr": 0.0337997668989631 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36127167630057805, + "acc_stderr": 0.025862201852277895, + "acc_norm": 0.36127167630057805, + "acc_norm_stderr": 0.025862201852277895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, 
+ "acc_stderr": 0.025329888171900933, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900933 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.033088185944157515, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.033088185944157515 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25688073394495414, + "acc_stderr": 0.01873249292834245, + "acc_norm": 0.25688073394495414, + "acc_norm_stderr": 0.01873249292834245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.033954900208561116, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.033954900208561116 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.02625605383571896, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.02625605383571896 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.47107438016528924, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998905, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.01892608291608339, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.01892608291608339 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372948, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372948 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225606, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225606 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19117647058823528, + "acc_stderr": 0.02388688192244036, + "acc_norm": 0.19117647058823528, + "acc_norm_stderr": 0.02388688192244036 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, 
+ "acc_stderr": 0.031052391937584353, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.031052391937584353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803545, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803545 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.032566854844603886, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.032566854844603886 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.03608541011573967, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.03608541011573967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394816, + "mc2": 0.44907946334045823, + "mc2_stderr": 0.015040408260408762 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3317591499409681, + "acc_stderr": 0.016187984642157316, + "acc_norm": 0.4203069657615112, + "acc_norm_stderr": 0.016970598281177706 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.2", + "model_sha": "116dea6c97133d0729b618bbe76cf650a92a90a8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.8/result_2023-10-20 03:33:51.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.8/result_2023-10-20 03:33:51.json new file mode 100644 index 0000000000000000000000000000000000000000..0297119a14837088f8ef2e8ef3122dd5fab9d8a0 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.8/result_2023-10-20 03:33:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38822525597269625, + "acc_stderr": 0.014241614207414046, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955262 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40689105755825533, + "acc_stderr": 0.004902502514738606, + "acc_norm": 0.5412268472415853, + "acc_norm_stderr": 0.004972790690640187 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.0352821125824523, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.0352821125824523 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.36893203883495146, + "acc_stderr": 0.04777615181156739, + "acc_norm": 0.36893203883495146, + "acc_norm_stderr": 0.04777615181156739 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2822477650063857, + "acc_stderr": 0.016095302969878548, + "acc_norm": 0.2822477650063857, + "acc_norm_stderr": 0.016095302969878548 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.0281854413012341, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.0281854413012341 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3247588424437299, + "acc_stderr": 0.026596782287697043, + "acc_norm": 0.3247588424437299, + "acc_norm_stderr": 0.026596782287697043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.25112107623318386, + "acc_stderr": 0.029105220833224622, + "acc_norm": 0.25112107623318386, + "acc_norm_stderr": 0.029105220833224622 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03274287914026866, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03274287914026866 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33076923076923076, + "acc_stderr": 0.023854795680971142, + "acc_norm": 0.33076923076923076, + "acc_norm_stderr": 0.023854795680971142 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694433, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694433 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.02637756702864586, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.02637756702864586 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.33760683760683763, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.33760683760683763, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04265792110940588, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940588 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.025348097468097856, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.025348097468097856 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39303482587064675, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.39303482587064675, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.22486772486772486, + "acc_stderr": 0.02150209607822914, + "acc_norm": 0.22486772486772486, + "acc_norm_stderr": 0.02150209607822914 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.025190181327608415, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.025190181327608415 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.31790123456790126, + "acc_stderr": 0.025910063528240865, + "acc_norm": 0.31790123456790126, + "acc_norm_stderr": 0.025910063528240865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39896373056994816, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.39896373056994816, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29357798165137616, + "acc_stderr": 0.01952515112263966, + "acc_norm": 0.29357798165137616, + "acc_norm_stderr": 0.01952515112263966 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.026992544339297226, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.026992544339297226 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.017917974069594726, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.017917974069594726 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460994, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.15178571428571427, + "acc_stderr": 0.03405702838185695, + "acc_norm": 0.15178571428571427, + "acc_norm_stderr": 0.03405702838185695 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364545, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364545 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23161764705882354, + "acc_stderr": 0.025626533803777562, + "acc_norm": 0.23161764705882354, + "acc_norm_stderr": 0.025626533803777562 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.02944377302259469, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.02944377302259469 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501933, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501933 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624335, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624335 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4600456246073735, + "mc2_stderr": 0.014958372484169768 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3600944510035419, + "acc_stderr": 0.01650368672044008, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.8", + "model_sha": "424602efb3cb7b2c4e901d325113335c002a1da2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1/result_2023-10-17 12:52:13.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1/result_2023-10-17 12:52:13.json new file mode 100644 index 0000000000000000000000000000000000000000..755a504740ae9170bb3b150c61f6552c3c48cb31 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1/result_2023-10-17 12:52:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3993174061433447, + "acc_stderr": 0.014312094557946704, + "acc_norm": 0.46928327645051193, + "acc_norm_stderr": 0.014583792546304038 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4056960764787891, + "acc_stderr": 0.004900227226433389, + "acc_norm": 0.5419239195379406, + "acc_norm_stderr": 0.00497221024402057 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.046202840822800406, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.046202840822800406 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2822477650063857, + "acc_stderr": 0.016095302969878534, + "acc_norm": 0.2822477650063857, + "acc_norm_stderr": 0.016095302969878534 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.027321078417387533, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.027321078417387533 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944968, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944968 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3762057877813505, + "acc_stderr": 0.027513925683549427, + "acc_norm": 0.3762057877813505, + "acc_norm_stderr": 0.027513925683549427 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2556053811659193, + "acc_stderr": 0.029275891003969923, + "acc_norm": 
0.2556053811659193, + "acc_norm_stderr": 0.029275891003969923 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.036951833116502325, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.036951833116502325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.029597329730978082, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.029597329730978082 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.29743589743589743, + "acc_stderr": 0.023177408131465942, + "acc_norm": 0.29743589743589743, + "acc_norm_stderr": 0.023177408131465942 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642749, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642749 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3548387096774194, + "acc_stderr": 0.027218889773308757, + "acc_norm": 0.3548387096774194, + "acc_norm_stderr": 0.027218889773308757 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03088273697413865, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03088273697413865 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.026880647889051996, + "acc_norm": 0.25660377358490566, + "acc_norm_stderr": 0.026880647889051996 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.417910447761194, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.417910447761194, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 
0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.021679219663693152, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.021679219663693152 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.407514450867052, + "acc_stderr": 0.0264545781469315, + "acc_norm": 0.407514450867052, + "acc_norm_stderr": 0.0264545781469315 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.02640614597362566, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.02640614597362566 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + "acc_stderr": 0.03508984236295341, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295341 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3192660550458716, + "acc_stderr": 0.01998782906975, + "acc_norm": 0.3192660550458716, + "acc_norm_stderr": 0.01998782906975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790605, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790605 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.026643278474508748, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.026643278474508748 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.018875682938069443, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.018875682938069443 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 
+ }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560534, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.38396624472573837, + "acc_stderr": 0.03165867806410668, + "acc_norm": 0.38396624472573837, + "acc_norm_stderr": 0.03165867806410668 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30247718383311606, + "acc_stderr": 0.011731524234165706, + "acc_norm": 0.30247718383311606, + "acc_norm_stderr": 0.011731524234165706 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.03320574612945432, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.03320574612945432 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.40606060606060607, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.40606060606060607, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.016451264440068246, + "mc2": 0.4905950778856991, + "mc2_stderr": 0.01526052031524314 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3305785123966942, + "acc_stderr": 0.0161734232988457, + "acc_norm": 0.4037780401416765, + "acc_norm_stderr": 0.016869031540298635 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1", + "model_sha": "f0e5e0f218635b4dd43f0ba2b3b4cd5007967625", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.1/result_2023-10-25 14:30:21.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.1/result_2023-10-25 14:30:21.json new file mode 100644 index 0000000000000000000000000000000000000000..b0588c628d77836967d1614d9ad0f9660e7d6c8b --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.1/result_2023-10-25 14:30:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.013975454122756557, + "acc_norm": 0.3993174061433447, + "acc_norm_stderr": 0.014312094557946705 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4021111332403904, + "acc_stderr": 0.004893220635011784, + "acc_norm": 0.536247759410476, + "acc_norm_stderr": 0.00497665198975764 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.01781438523853443, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.01781438523853443 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231004, + "acc_norm": 0.3276595744680851, + 
"acc_norm_stderr": 0.030683020843231004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34080717488789236, + "acc_stderr": 0.03181149747055358, + "acc_norm": 0.34080717488789236, + "acc_norm_stderr": 0.03181149747055358 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.039417076320648906, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.039417076320648906 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938145, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938145 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.0281291127091659, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.0281291127091659 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.0302850092590098, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.0302850092590098 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 
0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149152, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149152 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.026296227915613663, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.026296227915613663 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160667, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.027996723180631466, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.027996723180631466 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.3415032679738562, + "acc_stderr": 0.019184639328092484, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092484 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.015024083883322869, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.015024083883322869 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.02957326913441112, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.02957326913441112 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585895, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585895 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253597, + "mc2": 0.4170988801266876, + "mc2_stderr": 0.015242823678966766 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3612750885478158, + "acc_stderr": 0.016515463022412014, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.017185069732676524 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.1", + "model_sha": "37db0cf6282e151ecc013b98fda871ce486e52c3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.2/result_2023-10-26 11:07:01.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.2/result_2023-10-26 11:07:01.json new file mode 100644 index 0000000000000000000000000000000000000000..7a2cdb86aea633c86c98fcce7f82ae2344fb34de --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.2/result_2023-10-26 11:07:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979274, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471625 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41575383389762993, + "acc_stderr": 0.004918442328872009, + "acc_norm": 0.5518820952001593, + "acc_norm_stderr": 0.0049628462061255 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370606, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370606 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5363984674329502, + "acc_stderr": 0.017832524079593265, + "acc_norm": 0.5363984674329502, + "acc_norm_stderr": 
0.017832524079593265 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638629, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638629 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317227, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317227 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + 
"acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.0260671592222758, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.0260671592222758 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436774, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436774 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.026918645383239015, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.026918645383239015 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833925, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833925 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.02852638345214264, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.02852638345214264 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 
0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354143, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354143 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.03022522616001242, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.03022522616001242 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29720670391061454, + "acc_stderr": 0.015285313353641599, + "acc_norm": 0.29720670391061454, + "acc_norm_stderr": 0.015285313353641599 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.02747227447323382, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.02747227447323382 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897628, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897628 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.01570210709062788, + "mc2": 0.4471086568861838, + "mc2_stderr": 0.015281241232491133 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5466351829988194, + "acc_stderr": 0.01711541822522687, + "acc_norm": 0.602125147579693, + "acc_norm_stderr": 0.016827959054733395 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.2", + "model_sha": "b8290fa1d56a9ff58d2fecf3f8edd7058eb85502", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.3/result_2023-10-28 06:04:41.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.3/result_2023-10-28 06:04:41.json new file mode 100644 index 0000000000000000000000000000000000000000..53d4e1cd8b7bec171e9033b4eb54ca74667bb45d --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.3/result_2023-10-28 06:04:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3848122866894198, + "acc_stderr": 0.014218371065251095, + "acc_norm": 0.44112627986348124, + "acc_norm_stderr": 0.014509747749064664 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4186417048396734, + "acc_stderr": 0.00492328184182851, + "acc_norm": 0.5557657837084247, + "acc_norm_stderr": 0.004958649623815337 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.524904214559387, + "acc_stderr": 0.017857770704901035, + "acc_norm": 0.524904214559387, + "acc_norm_stderr": 0.017857770704901035 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840622, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828061, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + 
"acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376882, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376882 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327228, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327228 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.03602573571288442, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5614678899082569, + "acc_stderr": 0.02127471307395456, + "acc_norm": 0.5614678899082569, + "acc_norm_stderr": 
0.02127471307395456 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.0285803410651383, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.0285803410651383 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486635, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.01952431674486635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320207, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320207 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761992, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761992 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3546284224250326, + "acc_stderr": 0.012218576439090172, + "acc_norm": 0.3546284224250326, + "acc_norm_stderr": 0.012218576439090172 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326905, + "mc2": 0.441499711570202, + "mc2_stderr": 
0.015207137327045393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.5596221959858324, + "acc_norm_stderr": 0.017067699774312967 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.3", + "model_sha": "3444f841fe050a22a95a166ac953f92047e2c411", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.4/result_2023-10-28 05:50:19.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.4/result_2023-10-28 05:50:19.json new file mode 100644 index 0000000000000000000000000000000000000000..057edd92004604a095f4cf37e6ddef6d94c5a5b6 --- /dev/null +++ 
b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.4/result_2023-10-28 05:50:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39419795221843, + "acc_stderr": 0.01428052266746733, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955262 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4221270663214499, + "acc_stderr": 0.004928891895874289, + "acc_norm": 0.5546703843855806, + "acc_norm_stderr": 0.00495986429917813 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5044699872286079, + "acc_stderr": 0.017879248970584356, + "acc_norm": 0.5044699872286079, + "acc_norm_stderr": 0.017879248970584356 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894255, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894255 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.02478431694215636, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.02478431694215636 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, 
+ "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280458, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684973, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684973 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507748, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507748 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.02680372058320618, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.02680372058320618 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194045, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288442, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214338, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724553, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.040073418097558065, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.040073418097558065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.0305467452649532, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.0305467452649532 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163908, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163908 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29726205997392435, + "acc_stderr": 0.01167334617308604, + "acc_norm": 0.29726205997392435, + "acc_norm_stderr": 
0.01167334617308604 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431855, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431855 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.47405944536561195, + "mc2_stderr": 0.015299344788205708 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5206611570247934, + "acc_stderr": 0.01717567127983645, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.4", + 
"model_sha": "80bc09f73579e2bc021f084424f761f81e4f808d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.6/result_2023-10-30 01:00:17.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.6/result_2023-10-30 01:00:17.json new file mode 100644 index 0000000000000000000000000000000000000000..bde0c51f9ed95c2a24d52cb46eb39e5682f0c0b6 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.6/result_2023-10-30 01:00:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39761092150170646, + "acc_stderr": 0.014301752223279545, + "acc_norm": 0.4641638225255973, + "acc_norm_stderr": 0.01457381366473572 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4228241386178052, + "acc_stderr": 0.00492998369279506, + "acc_norm": 0.569308902609042, + "acc_norm_stderr": 0.004941609820763588 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5593869731800766, + "acc_stderr": 0.017753396973908497, + "acc_norm": 0.5593869731800766, + "acc_norm_stderr": 0.017753396973908497 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + 
"acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162933, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162933 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.031660988918880785, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.031660988918880785 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.0478200179138006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5761467889908257, + "acc_stderr": 0.021187263209087516, + "acc_norm": 0.5761467889908257, + "acc_norm_stderr": 0.021187263209087516 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534792, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534792 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.5224489795918368, + "acc_stderr": 0.03197694118713671, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713671 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34876140808344197, + "acc_stderr": 0.012172035157127115, + "acc_norm": 0.34876140808344197, + "acc_norm_stderr": 0.012172035157127115 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630573, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": 0.419680831083004, + "mc2_stderr": 0.014665587466952046 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4899645808736718, + "acc_stderr": 0.017186891286894053, + "acc_norm": 0.5832349468713105, + "acc_norm_stderr": 0.016950489146108815 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.6", + "model_sha": "951581c0603be594b823e8df208a4e6e307d413f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.8/result_2023-11-01 00:45:56.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.8/result_2023-11-01 00:45:56.json new file mode 100644 index 0000000000000000000000000000000000000000..5f83e3a8aab0ec443586c3f255ec0e3623b47fac --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.8/result_2023-11-01 00:45:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4035836177474403, + "acc_stderr": 0.014337158914268434, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955264 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42561242780322645, + "acc_stderr": 0.004934250390879783, + "acc_norm": 0.5671181039633539, + "acc_norm_stderr": 0.004944620712318275 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5568326947637292, + "acc_stderr": 0.017764085035348418, + "acc_norm": 0.5568326947637292, + "acc_norm_stderr": 0.017764085035348418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + 
"acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.03618664819936246, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936246 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502744, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502744 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.03193705726200293, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.03193705726200293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.03047144586718324, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.03047144586718324 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523857, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523857 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748139, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748139 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.021162420048273508, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.021162420048273508 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423556, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423556 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477752, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477752 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": 
{ + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.012238615750316494, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.012238615750316494 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.45429869166378484, + "mc2_stderr": 0.01501407718716332 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45808736717827625, + "acc_stderr": 0.017129852117911147, + "acc_norm": 0.5974025974025974, + "acc_norm_stderr": 0.016861020486407762 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.8", + "model_sha": "6608bbae2d57a5056ba8d5c82a7ecf8be4a640d6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4/result_2023-10-13 21:03:52.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4/result_2023-10-13 21:03:52.json new file mode 100644 index 0000000000000000000000000000000000000000..bbf69898a3feaea19bb3ab5f1506bb518009a00c --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4/result_2023-10-13 21:03:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3720136518771331, + "acc_stderr": 0.014124597881844466, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804243 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40938060147381, + "acc_stderr": 0.004907146229347555, + "acc_norm": 0.5426209918342959, + "acc_norm_stderr": 0.004971619995879755 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.047504583990416925, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.047504583990416925 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4891443167305236, + "acc_stderr": 0.017875748840242418, + "acc_norm": 0.4891443167305236, + "acc_norm_stderr": 0.017875748840242418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357783, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357783 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.027917050748484634, + "acc_norm": 0.40836012861736337, + "acc_norm_stderr": 0.027917050748484634 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.43, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.030066761582977924, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.030066761582977924 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.024283140529467284, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.024283140529467284 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3774193548387097, + "acc_stderr": 0.027575960723278243, + "acc_norm": 0.3774193548387097, + "acc_norm_stderr": 0.027575960723278243 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051622, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051622 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3433962264150943, + "acc_stderr": 0.029224526469124792, + "acc_norm": 0.3433962264150943, + "acc_norm_stderr": 0.029224526469124792 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473836, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473836 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48258706467661694, + "acc_stderr": 0.03533389234739244, + "acc_norm": 0.48258706467661694, + "acc_norm_stderr": 0.03533389234739244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.32947976878612717, + "acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02141168439369419, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02141168439369419 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.026424816594009845, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.026424816594009845 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124764, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124764 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.03561587327685883, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.03561587327685883 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3669724770642202, + "acc_stderr": 0.020664675659520532, + "acc_norm": 0.3669724770642202, + "acc_norm_stderr": 0.020664675659520532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.02742047766262923, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.02742047766262923 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223977, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 
0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093936, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093936 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23161764705882354, + "acc_stderr": 0.025626533803777565, + "acc_norm": 0.23161764705882354, + "acc_norm_stderr": 0.025626533803777565 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484385, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484385 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.46835443037974683, + "acc_stderr": 0.03248197400511075, + "acc_norm": 0.46835443037974683, + "acc_norm_stderr": 0.03248197400511075 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28226857887874834, + "acc_stderr": 0.011495852176241954, + "acc_norm": 0.28226857887874834, + "acc_norm_stderr": 0.011495852176241954 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589678, + "mc2": 0.4528465622549083, + "mc2_stderr": 0.015125783674090152 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3707201889020071, + "acc_stderr": 0.01660580128921261, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.017185069732676538 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4", + "model_sha": "26ef51b65661f5762efa36aadf56a7c3820e6762", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.1/result_2023-11-28 10:51:14.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.1/result_2023-11-28 10:51:14.json new file mode 100644 index 0000000000000000000000000000000000000000..0444e5ab71e633884a47669ef84c17c5e160aae6 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.1/result_2023-11-28 10:51:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40017064846416384, + "acc_stderr": 0.014317197787809167, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.014575583922019669 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4239195379406493, + "acc_stderr": 0.004931679059919374, + "acc_norm": 0.5702051384186417, + "acc_norm_stderr": 0.004940349676769318 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.017769250583533243, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.017769250583533243 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + 
"acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028337, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028337 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 
0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.02683080599895223, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.02683080599895223 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.021162420048273504, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.021162420048273504 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.01989841271763588, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.01989841271763588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.02878222756134725, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.02878222756134725 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36114732724902215, + "acc_stderr": 0.01226793547751903, + "acc_norm": 0.36114732724902215, + "acc_norm_stderr": 0.01226793547751903 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431855, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431855 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.3982113417748825, + "mc2_stderr": 0.014654617642929734 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4498229043683589, + "acc_stderr": 0.01710357334382571, + "acc_norm": 0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v7.1", + "model_sha": "b267bfc53b065e35c632ab140268615abc3e9fbf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.2/result_2023-11-28 11:08:56.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.2/result_2023-11-28 11:08:56.json new file mode 100644 index 0000000000000000000000000000000000000000..0a422b8c48ca8d6e5a67cade527f376cd23e683e --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.2/result_2023-11-28 11:08:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40784982935153585, + "acc_stderr": 0.014361097288449714, + "acc_norm": 0.47696245733788395, + "acc_norm_stderr": 0.014595873205358273 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4289982075283808, + "acc_stderr": 0.0049392156821917695, + "acc_norm": 0.5819557857000598, + "acc_norm_stderr": 0.004922294797766663 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5568326947637292, + "acc_stderr": 0.017764085035348418, + 
"acc_norm": 0.5568326947637292, + "acc_norm_stderr": 0.017764085035348418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.034889016168527305, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.034889016168527305 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686855, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962956, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, 
+ "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.030197611600197953, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.030197611600197953 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.0236369759961018, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.0236369759961018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5669724770642202, + "acc_stderr": 0.021244146569074352, + "acc_norm": 0.5669724770642202, + "acc_norm_stderr": 0.021244146569074352 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510467998, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510467998 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.020017629214213104, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.020017629214213104 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312549, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312549 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35984354628422427, + "acc_stderr": 0.012258260483689797, + "acc_norm": 0.35984354628422427, + "acc_norm_stderr": 0.012258260483689797 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041864, + "mc2": 0.3783182251954889, + "mc2_stderr": 0.01466893499051619 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42621015348288077, + "acc_stderr": 0.017002122609489252, + "acc_norm": 0.4911452184179457, + "acc_norm_stderr": 0.01718765819933674 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v7.2", + "model_sha": "5f5876fd03a477fadca9ac16760a5a1ec8d0acb3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.3/result_2023-11-28 11:18:44.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.3/result_2023-11-28 11:18:44.json new file mode 100644 index 0000000000000000000000000000000000000000..874b52d1e477e1f64ad2ec8434a84adf47eaf777 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.3/result_2023-11-28 11:18:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2901023890784983, + "acc_stderr": 0.013261573677520774, + "acc_norm": 0.36177474402730375, + "acc_norm_stderr": 0.014041957945038075 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3867755427205736, + "acc_stderr": 0.004860162076330979, + "acc_norm": 
0.5024895439155547, + "acc_norm_stderr": 0.004989719559439899 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0356507967070831, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0356507967070831 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.017570705239256544, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.017570705239256544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785139, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785139 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288086, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288086 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677698, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677698 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3686868686868687, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.3686868686868687, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38064516129032255, + "acc_stderr": 0.02762171783290704, + "acc_norm": 0.38064516129032255, + "acc_norm_stderr": 0.02762171783290704 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.26495726495726496, + "acc_stderr": 0.028911208802749493, + "acc_norm": 0.26495726495726496, + "acc_norm_stderr": 0.028911208802749493 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3660377358490566, + "acc_stderr": 0.02964781353936524, + "acc_norm": 0.3660377358490566, + "acc_norm_stderr": 0.02964781353936524 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724136, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724136 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3482587064676617, + "acc_stderr": 0.03368787466115459, + "acc_norm": 0.3482587064676617, + "acc_norm_stderr": 0.03368787466115459 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02256989707491841, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491841 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2861271676300578, + "acc_stderr": 0.02433214677913413, + "acc_norm": 0.2861271676300578, + "acc_norm_stderr": 0.02433214677913413 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2993827160493827, + "acc_stderr": 0.025483115601195462, + "acc_norm": 0.2993827160493827, + "acc_norm_stderr": 0.025483115601195462 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3853211009174312, + "acc_stderr": 0.02086585085279413, + "acc_norm": 0.3853211009174312, + "acc_norm_stderr": 0.02086585085279413 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.026568921015457155, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.026568921015457155 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.042059539338841254, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.042059539338841254 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.033911609343436025, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.033911609343436025 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.01843342764940189, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.01843342764940189 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952686, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952686 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369922, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369922 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.028245687391462913, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.028245687391462913 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 0.03055531675557364, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.03055531675557364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.34177215189873417, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.34177215189873417, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26401564537157757, + "acc_stderr": 0.011258435537723837, + "acc_norm": 0.26401564537157757, + "acc_norm_stderr": 0.011258435537723837 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + 
"acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456411, + "mc2": 0.38387267624394456, + "mc2_stderr": 0.014963849444289972 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3565525383707202, + "acc_stderr": 0.016467706981527455, + "acc_norm": 0.45690672963400236, + "acc_norm_stderr": 0.017126389093086784 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v7.3", + "model_sha": "2202330599ea1a5a07ee9a472a27b7e22cba0cf1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v7/result_2023-10-17 07:32:46.json 
b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7/result_2023-10-17 07:32:46.json new file mode 100644 index 0000000000000000000000000000000000000000..066c07fcf00ba4168ac5e0aaac05eee716cd4bed --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7/result_2023-10-17 07:32:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38139931740614336, + "acc_stderr": 0.014194389086685251, + "acc_norm": 0.45307167235494883, + "acc_norm_stderr": 0.014546892052005628 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41057558255327625, + "acc_stderr": 0.004909328992915067, + "acc_norm": 0.5488946425014938, + "acc_norm_stderr": 0.004965866098318175 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.046202840822800406, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.046202840822800406 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.31417624521072796, + "acc_stderr": 0.016599291735884893, + "acc_norm": 0.31417624521072796, + "acc_norm_stderr": 0.016599291735884893 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996793, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996793 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02850485647051419, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02850485647051419 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3954983922829582, + "acc_stderr": 0.027770918531427834, + "acc_norm": 0.3954983922829582, + "acc_norm_stderr": 0.027770918531427834 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.29596412556053814, + "acc_stderr": 0.03063659134869982, + "acc_norm": 0.29596412556053814, + "acc_norm_stderr": 0.03063659134869982 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.03318477333845332, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.03318477333845332 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.037528339580033376, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.037528339580033376 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.09803921568627451, + "acc_stderr": 0.029589188531613252, + "acc_norm": 0.09803921568627451, + "acc_norm_stderr": 0.029589188531613252 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.02971914287634286, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.02971914287634286 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2743589743589744, + "acc_stderr": 
0.022622765767493214, + "acc_norm": 0.2743589743589744, + "acc_norm_stderr": 0.022622765767493214 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642748, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642748 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34838709677419355, + "acc_stderr": 0.027104826328100944, + "acc_norm": 0.34838709677419355, + "acc_norm_stderr": 0.027104826328100944 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.37606837606837606, + "acc_stderr": 0.031733936329694824, + "acc_norm": 0.37606837606837606, + "acc_norm_stderr": 0.031733936329694824 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724046, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724046 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267438, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267438 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.12, + "acc_stderr": 0.03265986323710906, + "acc_norm": 0.12, + "acc_norm_stderr": 0.03265986323710906 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.0360251131880677, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.0360251131880677 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35802469135802467, + "acc_stderr": 0.0266756119260371, + "acc_norm": 0.35802469135802467, + 
"acc_norm_stderr": 0.0266756119260371 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104281, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104281 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3174311926605505, + "acc_stderr": 0.019957152198460504, + "acc_norm": 0.3174311926605505, + "acc_norm_stderr": 0.019957152198460504 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848876, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848876 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.02656892101545716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.02656892101545716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4380165289256198, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351586, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351586 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.01877168389352817, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.01877168389352817 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509317, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.028765111718046944, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.028765111718046944 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02518778666022727, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02518778666022727 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.43037974683544306, + "acc_stderr": 0.032230171959375976, + "acc_norm": 0.43037974683544306, + 
"acc_norm_stderr": 0.032230171959375976 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29791395045632335, + "acc_stderr": 0.011680717340400057, + "acc_norm": 0.29791395045632335, + "acc_norm_stderr": 0.011680717340400057 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627884, + "mc2": 0.43717065836326097, + "mc2_stderr": 0.014982579691917674 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32585596221959856, + "acc_stderr": 0.016114023894800322, + "acc_norm": 0.3754427390791027, + "acc_norm_stderr": 0.016648411589511088 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v7", + "model_sha": "c0836cce043af8ee88da9cb52b2032d3fa8c5ddd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/CoT-llama-2k-7b/result_2023-09-27 16:26:44.json b/kyujinpy/CoT-llama-2k-7b/result_2023-09-27 16:26:44.json new file mode 100644 index 0000000000000000000000000000000000000000..b8b7e4919429e6627141ed1d7b4749b3a921de26 --- /dev/null +++ b/kyujinpy/CoT-llama-2k-7b/result_2023-09-27 16:26:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.3677474402730375, + "acc_norm_stderr": 0.014090995618168484 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3822943636725752, + "acc_stderr": 0.004849547819134473, + "acc_norm": 0.4938259310894244, + "acc_norm_stderr": 0.00498940098472222 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3486590038314176, + "acc_stderr": 0.017041243143490946, + "acc_norm": 0.3486590038314176, + "acc_norm_stderr": 0.017041243143490946 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386705, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386705 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.34726688102893893, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.34726688102893893, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3787878787878788, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.3787878787878788, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138622, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138622 + 
}, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.028942004040998167, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.028942004040998167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233484, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233484 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671746, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671746 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.028637235639800935, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.028637235639800935 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.32338308457711445, + "acc_stderr": 0.03307615947979033, + "acc_norm": 0.32338308457711445, + "acc_norm_stderr": 0.03307615947979033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.18055555555555555, + "acc_stderr": 0.032166008088022675, + "acc_norm": 0.18055555555555555, + "acc_norm_stderr": 0.032166008088022675 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3265895953757225, + "acc_stderr": 0.02524826477424284, + "acc_norm": 0.3265895953757225, + "acc_norm_stderr": 0.02524826477424284 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32098765432098764, + "acc_stderr": 0.025976566010862744, + "acc_norm": 0.32098765432098764, + "acc_norm_stderr": 0.025976566010862744 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3724770642201835, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.3724770642201835, + "acc_norm_stderr": 0.020728368457638494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.027420477662629245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.027420477662629245 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.018120224251484577, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484577 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.02689170942834396, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.02689170942834396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841196, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.02879518557429129, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.02879518557429129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.03121956944530184, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.03121956944530184 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2842242503259452, + "acc_stderr": 0.011519880596516076, + "acc_norm": 0.2842242503259452, + "acc_norm_stderr": 0.011519880596516076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03588624800091708, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091708 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.01510240479735965, + "mc2": 0.3775578914340665, + "mc2_stderr": 0.014769349915486594 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21605667060212513, + "acc_stderr": 0.014149496716043137, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.015840538932534103 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/CoT-llama-2k-7b", + "model_sha": "67fb09946bc99c9ba5f97b8675e27d217b353280", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KO-Platypus2-13B/result_2023-10-05 18:34:43.json b/kyujinpy/KO-Platypus2-13B/result_2023-10-05 18:34:43.json new file mode 100644 index 0000000000000000000000000000000000000000..792482e69d35acf3ea82024b5b762dc3ad807866 --- /dev/null +++ b/kyujinpy/KO-Platypus2-13B/result_2023-10-05 18:34:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910471, + "acc_norm": 0.44197952218430037, + "acc_norm_stderr": 0.014512682523128345 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40818562039434375, + "acc_stderr": 0.004904933500255867, + "acc_norm": 0.5431189006174069, + "acc_norm_stderr": 0.0049711923872024465 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.017855434554042, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.017855434554042 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489424, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 
0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.032061837832361516, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.032061837832361516 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710852, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710852 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 
0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303128, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194048, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194048 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.021393071222680804, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.021393071222680804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490437, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490437 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.019291961895066382, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.019291961895066382 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611317, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966346, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966346 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37353324641460234, + "acc_stderr": 0.012354994823515274, + "acc_norm": 0.37353324641460234, + "acc_norm_stderr": 0.012354994823515274 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006514, + "mc2": 0.44412739310048044, + "mc2_stderr": 0.015229602209106612 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.37662337662337664, + "acc_stderr": 0.01665879987405197, + "acc_norm": 0.42621015348288077, + "acc_norm_stderr": 0.01700212260948926 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KO-Platypus2-13B", + "model_sha": "7c01146d2de47036b18a99357ea4ea80c3cfebf5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KO-Platypus2-7B-ex/result_2023-09-27 11:27:48.json b/kyujinpy/KO-Platypus2-7B-ex/result_2023-09-27 11:27:48.json new file mode 100644 index 0000000000000000000000000000000000000000..32a44e034230999f3f8ae70f25f5360de4517aeb --- /dev/null +++ b/kyujinpy/KO-Platypus2-7B-ex/result_2023-09-27 11:27:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3302047781569966, + "acc_stderr": 0.013743085603760427, + "acc_norm": 0.39078498293515357, + "acc_norm_stderr": 0.014258563880513778 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3868751244771958, + "acc_stderr": 0.004860393011974673, + "acc_norm": 0.5085640310695081, + "acc_norm_stderr": 0.004989049430391292 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38697318007662834, + "acc_stderr": 0.017417138059440146, + "acc_norm": 0.38697318007662834, + "acc_norm_stderr": 0.017417138059440146 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628817, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628817 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.037400593820293204, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.037400593820293204 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.37942122186495175, + "acc_stderr": 0.027559949802347813, + "acc_norm": 0.37942122186495175, + "acc_norm_stderr": 0.027559949802347813 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.0316314580755238, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.0316314580755238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2846153846153846, + "acc_stderr": 0.022878322799706294, + "acc_norm": 0.2846153846153846, + "acc_norm_stderr": 0.022878322799706294 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33548387096774196, + "acc_stderr": 0.026860206444724342, + "acc_norm": 0.33548387096774196, + "acc_norm_stderr": 0.026860206444724342 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.47863247863247865, + "acc_stderr": 0.032726164476349545, + "acc_norm": 0.47863247863247865, + "acc_norm_stderr": 0.032726164476349545 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33962264150943394, + "acc_stderr": 0.029146904747798356, + "acc_norm": 0.33962264150943394, + "acc_norm_stderr": 0.029146904747798356 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.046075820907199756, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.046075820907199756 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.4626865671641791, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.4626865671641791, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.02271746789770861, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.02271746789770861 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.40173410404624277, + "acc_stderr": 0.026394104177643634, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3765432098765432, + "acc_stderr": 0.026959344518747794, + "acc_norm": 0.3765432098765432, + "acc_norm_stderr": 0.026959344518747794 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4036697247706422, + "acc_stderr": 0.021035704856574963, + "acc_norm": 0.4036697247706422, + "acc_norm_stderr": 0.021035704856574963 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891776, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891776 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.018950886770806308, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.018950886770806308 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + 
"acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.031251275910891656, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.031251275910891656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.01197767670471599, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.01197767670471599 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674119, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674119 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.03756335775187897, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.03756335775187897 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752334, + "mc2": 0.3794460140456843, + "mc2_stderr": 0.014936611984494383 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2066115702479339, + "acc_stderr": 0.013919866463909341, + "acc_norm": 0.2987012987012987, + "acc_norm_stderr": 0.015735657391438278 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 
1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KO-Platypus2-7B-ex", + "model_sha": "63ad569198c7fabc62f292604211fed3435b3f48", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KOR-Orca-Platypus-13B-v2/result_2023-11-10 07:29:20.json b/kyujinpy/KOR-Orca-Platypus-13B-v2/result_2023-11-10 07:29:20.json new file mode 100644 index 0000000000000000000000000000000000000000..880218cff3036be318cf9ec67a68babd7e1b3832 --- /dev/null +++ b/kyujinpy/KOR-Orca-Platypus-13B-v2/result_2023-11-10 07:29:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938167, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804248 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4082852021509659, + "acc_stderr": 0.00490511903984946, + "acc_norm": 0.5443138816968731, + "acc_norm_stderr": 0.004970145708187994 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5070242656449553, + "acc_stderr": 0.017878199003432214, + "acc_norm": 0.5070242656449553, + "acc_norm_stderr": 0.017878199003432214 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464242, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464242 + }, + "harness|ko_mmlu_abstract_algebra|5": { + 
"acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.03078373675774564, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.03078373675774564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3991031390134529, + "acc_stderr": 0.03286745312567961, + "acc_norm": 0.3991031390134529, + "acc_norm_stderr": 0.03286745312567961 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.040131241954243856, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.040131241954243856 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.033864057460620905, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.033864057460620905 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561053, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561053 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 
0.04785964010794915, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.035333892347392454, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.035333892347392454 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730557, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730557 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239004 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288442, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5119266055045871, + "acc_stderr": 0.021431223617362223, + "acc_norm": 0.5119266055045871, + "acc_norm_stderr": 0.021431223617362223 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171563, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171563 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 
0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.01913994374848703, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.01913994374848703 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190735, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190735 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823062997, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823062997 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237028, + "mc2": 0.41635797039765154, + "mc2_stderr": 0.015043272865517212 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5596221959858324, + "acc_stderr": 0.017067699774312967, + "acc_norm": 0.6505312868949232, + "acc_norm_stderr": 0.01639279708576985 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KOR-Orca-Platypus-13B-v2", + "model_sha": "a7466c0b153313306597a1f3abee65a9ee73869e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KOR-Orca-Platypus-13B-v3/result_2023-11-12 12:48:05.json b/kyujinpy/KOR-Orca-Platypus-13B-v3/result_2023-11-12 12:48:05.json new file mode 100644 index 0000000000000000000000000000000000000000..85e918295cf9b1e1c41a466e35f09a9bf5272dc7 --- /dev/null +++ b/kyujinpy/KOR-Orca-Platypus-13B-v3/result_2023-11-12 12:48:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3924914675767918, + "acc_stderr": 0.014269634635670722, + "acc_norm": 0.4377133105802048, + "acc_norm_stderr": 0.014497573881108282 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4113722366062537, + "acc_stderr": 0.004910767540867421, + "acc_norm": 0.5427205735909182, + "acc_norm_stderr": 0.004971534874389945 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.5728155339805825, + "acc_stderr": 0.048979577377811674, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.048979577377811674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633938, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633938 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357773, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357773 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.025254485424799605, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.025254485424799605 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998576, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998576 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983693, + "acc_norm": 0.4612903225806452, + 
"acc_norm_stderr": 0.02835863485983693 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945266, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945266 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.03487558640462063, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.03487558640462063 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539288, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539288 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5431192660550459, + "acc_stderr": 0.021357458785226217, + "acc_norm": 0.5431192660550459, + "acc_norm_stderr": 0.021357458785226217 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 
0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236397, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.019162418588623553, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.019162418588623553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483924, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.01198993664066654, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.01198993664066654 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456416, + "mc2": 0.3858413065485574, + "mc2_stderr": 0.014719494606316911 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.538370720188902, + "acc_stderr": 0.017139660221845564, + "acc_norm": 0.6257378984651711, + "acc_norm_stderr": 
0.016637917789798746 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KOR-Orca-Platypus-13B-v3", + "model_sha": "249ae0349d4c536d33d68d9d36946b1abd76c80f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Ko-PlatYi-6B-O/result_2023-12-04 08:36:16.json b/kyujinpy/Ko-PlatYi-6B-O/result_2023-12-04 08:36:16.json new file mode 100644 index 0000000000000000000000000000000000000000..1fb14c7adf47229962b38a8da4fa0acd47719968 --- /dev/null +++ b/kyujinpy/Ko-PlatYi-6B-O/result_2023-12-04 08:36:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.01411129875167495, + "acc_norm": 0.4351535836177474, + 
"acc_norm_stderr": 0.01448798619718605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40131447918741286, + "acc_stderr": 0.004891626718097273, + "acc_norm": 0.5359490141406095, + "acc_norm_stderr": 0.004976867796583556 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5798212005108557, + "acc_stderr": 0.017650651363078026, + "acc_norm": 0.5798212005108557, + "acc_norm_stderr": 0.017650651363078026 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6565656565656566, + "acc_stderr": 0.033832012232444426, + "acc_norm": 0.6565656565656566, + "acc_norm_stderr": 0.033832012232444426 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": 
{ + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524582, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524582 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.423841059602649, + "acc_stderr": 0.04034846678603397, + "acc_norm": 0.423841059602649, + "acc_norm_stderr": 0.04034846678603397 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.02446442662559643, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.02446442662559643 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5346820809248555, + "acc_stderr": 0.02685425792825888, + "acc_norm": 0.5346820809248555, + "acc_norm_stderr": 0.02685425792825888 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6321243523316062, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.6321243523316062, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, 
+ "acc_stderr": 0.04559522141958215, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958215 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6458715596330276, + "acc_stderr": 0.0205047290138291, + "acc_norm": 0.6458715596330276, + "acc_norm_stderr": 0.0205047290138291 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557836, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557836 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.02855582751652879, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.02855582751652879 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.020054269200726452, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.020054269200726452 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28268156424581004, + "acc_stderr": 0.015060381730018104, + "acc_norm": 0.28268156424581004, + "acc_norm_stderr": 0.015060381730018104 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34159061277705344, + "acc_stderr": 0.012112391320842842, + "acc_norm": 0.34159061277705344, + "acc_norm_stderr": 0.012112391320842842 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.6363636363636364, + "acc_stderr": 0.03756335775187897, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187897 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015002, + "mc2": 0.4101339837114337, + "mc2_stderr": 0.014973082840461931 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5395513577331759, + "acc_stderr": 0.017136487626049846, + "acc_norm": 0.5938606847697757, + "acc_norm_stderr": 0.0168847495031914 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Ko-PlatYi-6B-O", + "model_sha": "3774765323b0aa133fbf0aac2b600662619143c2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/kyujinpy/Ko-PlatYi-6B-gu/result_2023-12-03 20:06:11.json b/kyujinpy/Ko-PlatYi-6B-gu/result_2023-12-03 20:06:11.json new file mode 100644 index 0000000000000000000000000000000000000000..3af13ef2b021755e6e8e82f58702a547806abc19 --- /dev/null +++ b/kyujinpy/Ko-PlatYi-6B-gu/result_2023-12-03 20:06:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142824, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650645 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40310695080661224, + "acc_stderr": 0.004895194143892681, + "acc_norm": 0.5400318661621191, + "acc_norm_stderr": 0.004973762948302801 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.561941251596424, + "acc_stderr": 0.017742232238257244, + "acc_norm": 0.561941251596424, + "acc_norm_stderr": 0.017742232238257244 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542125, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542125 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883232, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883232 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.041124909746707884, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.041124909746707884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.02782021415859437, + "acc_norm": 
0.49691358024691357, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6165137614678899, + "acc_stderr": 0.020847156641915984, + "acc_norm": 0.6165137614678899, + "acc_norm_stderr": 0.020847156641915984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786164, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786164 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280065, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280065 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 
0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214936, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214936 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.034956245220154766, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.034956245220154766 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875828, + "mc2": 0.41224631631998104, + "mc2_stderr": 0.014851766323164695 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5312868949232585, + "acc_stderr": 0.017156666859785466, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.016756921571069422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Ko-PlatYi-6B-gu", + "model_sha": "bc972ad5d404c634de847af30c1e4e665f18e939", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Ko-PlatYi-6B-kiwi/result_2023-12-03 21:11:04.json b/kyujinpy/Ko-PlatYi-6B-kiwi/result_2023-12-03 21:11:04.json new file mode 100644 index 0000000000000000000000000000000000000000..123cb800180701d8186ae95938846844f83332da --- /dev/null +++ b/kyujinpy/Ko-PlatYi-6B-kiwi/result_2023-12-03 21:11:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35494880546075086, + "acc_stderr": 0.013983036904094095, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303024 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4047002589125672, + "acc_stderr": 0.004898308167211844, + "acc_norm": 0.5361481776538538, + "acc_norm_stderr": 0.00497672412485057 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + 
"acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5225806451612903, + "acc_stderr": 0.02841498501970786, + "acc_norm": 0.5225806451612903, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.02905858830374885, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.02905858830374885 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307712, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307712 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270697, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270697 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6440366972477064, + "acc_stderr": 0.020528559278244218, + "acc_norm": 0.6440366972477064, + "acc_norm_stderr": 0.020528559278244218 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225868, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225868 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.01994491413687358, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.01994491413687358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475363, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475363 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585895, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585895 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148123, + "mc2": 0.3830022681696009, + "mc2_stderr": 0.014630531364087136 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.538370720188902, + "acc_stderr": 0.017139660221845553, + "acc_norm": 0.6375442739079102, + "acc_norm_stderr": 0.016527131240453692 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Ko-PlatYi-6B-kiwi", + "model_sha": "ba064c7052774a9d2935b9066785962323f9190f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Ko-PlatYi-6B/result_2023-12-03 08:38:12.json b/kyujinpy/Ko-PlatYi-6B/result_2023-12-03 08:38:12.json new file mode 100644 index 0000000000000000000000000000000000000000..53f39ccd1e70dc432a32a2df5dda12364a6c9681 --- /dev/null +++ b/kyujinpy/Ko-PlatYi-6B/result_2023-12-03 08:38:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35665529010238906, + "acc_stderr": 0.013998056902620194, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4018123879705238, + "acc_stderr": 0.004892624490937213, + "acc_norm": 0.5354511053574985, + "acc_norm_stderr": 0.004977223485342017 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.017747874245683606, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.017747874245683606 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.028397944907806612, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.028397944907806612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095495, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871927, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871927 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596437, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596437 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.616580310880829, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.616580310880829, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.020728368457638494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562605, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43300653594771243, + "acc_stderr": 0.02004544247332422, + "acc_norm": 0.43300653594771243, + "acc_norm_stderr": 0.02004544247332422 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { 
+ "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319482, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319482 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3513689700130378, + "acc_stderr": 0.012192969457484042, + "acc_norm": 0.3513689700130378, + "acc_norm_stderr": 0.012192969457484042 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.01550620472283456, + "mc2": 0.4031449478693446, + "mc2_stderr": 0.014887476682682517 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5903187721369539, + "acc_stderr": 0.016907568192219478, + "acc_norm": 0.6646989374262101, + "acc_norm_stderr": 0.01623098123298981 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Ko-PlatYi-6B", + "model_sha": "8be4a0d75b1fd70867e5f27184e2eb4e9a72dc37", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KoR-Orca-Platypus-13B/result_2023-10-14 11:37:08.json b/kyujinpy/KoR-Orca-Platypus-13B/result_2023-10-14 11:37:08.json new file mode 100644 index 0000000000000000000000000000000000000000..ed51c5ea8336cd0f01cc81e3e049af5917745f21 --- /dev/null +++ b/kyujinpy/KoR-Orca-Platypus-13B/result_2023-10-14 11:37:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.0140978106780422, + "acc_norm": 0.4206484641638225, + "acc_norm_stderr": 0.014426211252508394 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4047002589125672, + "acc_stderr": 0.004898308167211838, + "acc_norm": 0.5395339573790081, + "acc_norm_stderr": 0.004974159561342694 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5095785440613027, + "acc_stderr": 0.01787668227534086, + "acc_norm": 0.5095785440613027, + "acc_norm_stderr": 0.01787668227534086 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745657, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745657 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.037400593820293204, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.037400593820293204 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998576, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998576 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502744, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502744 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066475, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066475 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.035161847729521675, 
+ "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.035161847729521675 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562427, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562427 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0383515395439942, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0383515395439942 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5376146788990825, + "acc_stderr": 0.021376575274397576, + "acc_norm": 0.5376146788990825, + "acc_norm_stderr": 0.021376575274397576 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141107, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141107 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.019469518221573702, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.019469518221573702 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + 
"acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983583, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983583 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35658409387222945, + "acc_stderr": 0.012233642989273891, + "acc_norm": 0.35658409387222945, + "acc_norm_stderr": 0.012233642989273891 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326905, + "mc2": 0.43550201857978377, + "mc2_stderr": 0.015311053526638174 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4498229043683589, + "acc_stderr": 0.017103573343825708, + "acc_norm": 0.5112160566706021, + "acc_norm_stderr": 0.017186028469489294 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KoR-Orca-Platypus-13B", + "model_sha": "66063590ce01dc70a30bcf04f1f8addd7e72f73b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KoT-platypus2-13B/result_2023-10-07 18:04:13.json b/kyujinpy/KoT-platypus2-13B/result_2023-10-07 18:04:13.json new file mode 100644 index 0000000000000000000000000000000000000000..be130bdbcdcda34f4c76c8830cf54dbeebf8b128 --- /dev/null +++ b/kyujinpy/KoT-platypus2-13B/result_2023-10-07 18:04:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142818, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256515 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40221071499701255, + "acc_stderr": 0.004893418929918276, + "acc_norm": 0.5304720175263892, + "acc_norm_stderr": 0.004980506329407588 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.017869330154003698, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.017869330154003698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, 
+ "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03196876989195779, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03196876989195779 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.02486499515976777, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.02486499515976777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432564, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432564 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194048, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194048 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.02131133500970857, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.02131133500970857 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 
0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.0193733324207245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.0193733324207245 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611317, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210756, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210756 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.01231940336956464, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.01231940336956464 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237272, + "mc2": 0.4334291763920242, + "mc2_stderr": 0.014968924711902113 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3872491145218418, + "acc_stderr": 0.016747577991642792, + "acc_norm": 0.44391971664698937, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KoT-platypus2-13B", + "model_sha": "1d45520e4c2a4b5dc52dcafb788efb2420ad20b5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KoT-platypus2-7B/result_2023-09-30 05:32:02.json b/kyujinpy/KoT-platypus2-7B/result_2023-09-30 05:32:02.json new file mode 100644 index 0000000000000000000000000000000000000000..f4c86b8d5cd7365d2ec7b6cc9bab1f1425013b47 --- /dev/null +++ b/kyujinpy/KoT-platypus2-7B/result_2023-09-30 05:32:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.013659980894277375, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349819 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38179645488946423, + "acc_stderr": 0.004848341560492151, + "acc_norm": 0.4963154750049791, + "acc_norm_stderr": 0.004989645929811438 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.04721188506097172, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.04721188506097172 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39208173690932313, + 
"acc_stderr": 0.01745852405014764, + "acc_norm": 0.39208173690932313, + "acc_norm_stderr": 0.01745852405014764 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386705, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386705 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.39228295819935693, + "acc_stderr": 0.027731258647011998, + "acc_norm": 0.39228295819935693, + "acc_norm_stderr": 0.027731258647011998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330313, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330313 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31794871794871793, + "acc_stderr": 0.02361088430892786, + "acc_norm": 0.31794871794871793, + "acc_norm_stderr": 0.02361088430892786 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114485, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534327, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534327 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5, + "acc_stderr": 0.03275608910402091, + "acc_norm": 0.5, + "acc_norm_stderr": 
0.03275608910402091 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3433962264150943, + "acc_stderr": 0.029224526469124792, + "acc_norm": 0.3433962264150943, + "acc_norm_stderr": 0.029224526469124792 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505415, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505415 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43781094527363185, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.43781094527363185, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267437, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267437 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415426, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415426 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.02653818910470548, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.02653818910470548 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.03680350371286461, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.03680350371286461 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.02716368603827123, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.02716368603827123 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3926605504587156, + "acc_stderr": 0.020937505161201093, + "acc_norm": 0.3926605504587156, + "acc_norm_stderr": 0.020937505161201093 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 
+ }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.01899970738316266, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.01899970738316266 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460997, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460997 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966344, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966344 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.02997280717046463, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.02997280717046463 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.031251275910891656, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.031251275910891656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.01196531153657153, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.01196531153657153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752332, + "mc2": 0.37686510476734664, + "mc2_stderr": 0.014752533377181794 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21959858323494688, + "acc_stderr": 0.01423274308558027, + "acc_norm": 0.30932703659976385, + "acc_norm_stderr": 0.015891320505520893 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KoT-platypus2-7B", + "model_sha": "33eb53d72129db3b1936f07fd894a18b571d7ab6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Korean-OpenOrca-13B-v2/result_2023-10-30 20:07:34.json b/kyujinpy/Korean-OpenOrca-13B-v2/result_2023-10-30 20:07:34.json new file mode 100644 index 0000000000000000000000000000000000000000..83c0c1697d491e2053d05bb361947acec0bfaa6e --- /dev/null +++ b/kyujinpy/Korean-OpenOrca-13B-v2/result_2023-10-30 20:07:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3848122866894198, + "acc_stderr": 0.014218371065251098, + "acc_norm": 0.431740614334471, + "acc_norm_stderr": 0.014474591427196202 + }, + "harness|ko_hellaswag|10": { + "acc": 0.410973909579765, + "acc_stderr": 0.004910049928688081, + "acc_norm": 0.5451105357498506, + "acc_norm_stderr": 
0.004969431900874307 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5210727969348659, + "acc_stderr": 0.017864076786212914, + "acc_norm": 0.5210727969348659, + "acc_norm_stderr": 0.017864076786212914 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094527, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094527 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.03525675167467975, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.03525675167467975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523864, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5357798165137615, 
+ "acc_stderr": 0.021382364775701906, + "acc_norm": 0.5357798165137615, + "acc_norm_stderr": 0.021382364775701906 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223974, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223974 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966734, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966734 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 0.011808598262503316, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.2582619339045288, + "mc1_stderr": 0.01532182168847619, + "mc2": 0.4181669609619488, + "mc2_stderr": 0.015057490220303692 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5100354191263282, + "acc_stderr": 0.01718689128689406, + "acc_norm": 0.5844155844155844, + "acc_norm_stderr": 0.016943586313076565 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Korean-OpenOrca-13B-v2", + "model_sha": "2b0b46a6b5b6bbf41029a0918c49ac11456c3512", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Korean-OpenOrca-13B/result_2023-10-09 08:06:20.json b/kyujinpy/Korean-OpenOrca-13B/result_2023-10-09 08:06:20.json new file mode 100644 index 
0000000000000000000000000000000000000000..32c71ba3b209566af1b4bc8c3d2b5b70ddb08820 --- /dev/null +++ b/kyujinpy/Korean-OpenOrca-13B/result_2023-10-09 08:06:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37372013651877134, + "acc_stderr": 0.014137708601759091, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.014471133392642475 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4080860386377216, + "acc_stderr": 0.004904747752286962, + "acc_norm": 0.5413264289982075, + "acc_norm_stderr": 0.0049727083696565425 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.01786209177850786, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.01786209177850786 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.028217683556652315, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.028217683556652315 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969566, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048487, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.02339382650048487 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833935, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833935 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + 
}, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.02811092849280907, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.02811092849280907 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.01916241858862356, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.01916241858862356 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169934, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169934 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.027365861131513805, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.027365861131513805 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30247718383311606, + "acc_stderr": 0.0117315242341657, + "acc_norm": 0.30247718383311606, + "acc_norm_stderr": 
0.0117315242341657 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502332, + "mc2": 0.4522241098057631, + "mc2_stderr": 0.015289294572002421 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5301062573789846, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.01674757799164278 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Korean-OpenOrca-13B", + "model_sha": 
"1f0024f9356a1601ba642c01fd01b309c59b65b8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Korean-OpenOrca-v3/result_2023-11-04 08:15:25.json b/kyujinpy/Korean-OpenOrca-v3/result_2023-11-04 08:15:25.json new file mode 100644 index 0000000000000000000000000000000000000000..fab9fcc32d7901ff3e3ada998a4a8dcb4316fe05 --- /dev/null +++ b/kyujinpy/Korean-OpenOrca-v3/result_2023-11-04 08:15:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38822525597269625, + "acc_stderr": 0.014241614207414044, + "acc_norm": 0.4377133105802048, + "acc_norm_stderr": 0.014497573881108288 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40938060147381, + "acc_stderr": 0.004907146229347557, + "acc_norm": 0.5430193188607847, + "acc_norm_stderr": 0.004971278309204196 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.01785298126663394, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.01785298126663394 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03540294377095368, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03540294377095368 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808779, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808779 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561056, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561056 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.03056159042673183, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655812, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655812 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756653, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + 
"acc_stderr": 0.03901591825836183, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836183 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5137614678899083, + "acc_stderr": 0.021429202089874075, + "acc_norm": 0.5137614678899083, + "acc_norm_stderr": 0.021429202089874075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981747, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981747 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.019139943748487043, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.019139943748487043 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347019, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347019 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841196, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 
0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3161668839634941, + "acc_stderr": 0.011875780894386578, + "acc_norm": 0.3161668839634941, + "acc_norm_stderr": 0.011875780894386578 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608735, + "mc2": 0.43845744832254846, + "mc2_stderr": 0.015212073687467038 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5277449822904369, + "acc_stderr": 0.017163867979456016, + "acc_norm": 0.6056670602125147, + "acc_norm_stderr": 0.01680209067489322 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Korean-OpenOrca-v3", + "model_sha": "933525ee691e7002a63054208e012a6bf6b08623", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Kosy-Platypus2-13B/result_2023-10-24 13:04:50.json b/kyujinpy/Kosy-Platypus2-13B/result_2023-10-24 13:04:50.json new file mode 100644 index 0000000000000000000000000000000000000000..b80882a0f8a667ab68ec7939cc327642e240f2a8 --- /dev/null +++ b/kyujinpy/Kosy-Platypus2-13B/result_2023-10-24 13:04:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580122 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40679147580163316, + "acc_stderr": 0.0049023140557255904, + "acc_norm": 0.5388368850826528, + "acc_norm_stderr": 0.00497470642843428 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.01787084750608174, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.01787084750608174 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 
0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.02518914989476419, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.02518914989476419 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.034524539038220385, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.034524539038220385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.030242233800854498, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.030242233800854498 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696525, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 
0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239004 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.0276671385694227, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.0276671385694227 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426465, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426465 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 
0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687754, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687754 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35984354628422427, + "acc_stderr": 0.012258260483689803, + "acc_norm": 0.35984354628422427, + "acc_norm_stderr": 0.012258260483689803 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522509, + "mc2": 0.43461012650741965, + "mc2_stderr": 0.015133199211121806 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346453, + "acc_norm": 0.34238488783943327, + "acc_norm_stderr": 0.016313907844146373 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 
1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Kosy-Platypus2-13B", + "model_sha": "7d5af714d5429ed3496f73e8a44525bec4a73d20", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Kosy-platypus2-13B-v2/result_2023-10-26 16:55:23.json b/kyujinpy/Kosy-platypus2-13B-v2/result_2023-10-26 16:55:23.json new file mode 100644 index 0000000000000000000000000000000000000000..aff2f85c8df60da5e65f4604b2289447e6bf76e0 --- /dev/null +++ b/kyujinpy/Kosy-platypus2-13B-v2/result_2023-10-26 16:55:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3848122866894198, + "acc_stderr": 0.014218371065251093, + "acc_norm": 0.44197952218430037, + "acc_norm_stderr": 0.014512682523128343 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4116709818761203, + "acc_stderr": 0.004911303569769794, + "acc_norm": 0.5456084445329615, + "acc_norm_stderr": 0.004968979259738331 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.017869330154003698, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.017869330154003698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231015, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231015 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168284, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168284 
+ }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042328, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042328 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523867, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523867 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5431192660550459, + "acc_stderr": 0.02135745878522621, + "acc_norm": 0.5431192660550459, + "acc_norm_stderr": 0.02135745878522621 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.019333142020797073, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.019333142020797073 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650133, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650133 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176852, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176852 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824852, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824852 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.011971507294982775, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.011971507294982775 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237033, + "mc2": 0.42682166058026266, + "mc2_stderr": 0.015080394807895544 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3837072018890201, + "acc_stderr": 0.016718924637231822, + "acc_norm": 0.43211334120425027, + "acc_norm_stderr": 0.01703117019885175 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Kosy-platypus2-13B-v2", + "model_sha": "6f4bbfe83457bd7c30e3229be576883534ae37b1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Kosy-platypus2-13B-v3/result_2023-10-27 11:47:47.json b/kyujinpy/Kosy-platypus2-13B-v3/result_2023-10-27 11:47:47.json new file mode 100644 index 0000000000000000000000000000000000000000..3698ec9a8efa58cace3872e7c4f7103482a1d31d --- /dev/null +++ b/kyujinpy/Kosy-platypus2-13B-v3/result_2023-10-27 11:47:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38139931740614336, + "acc_stderr": 0.014194389086685247, + "acc_norm": 0.4334470989761092, + "acc_norm_stderr": 0.0144813762245589 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4124676359290978, + "acc_stderr": 0.004912723848944785, + "acc_norm": 0.5454092810197172, + "acc_norm_stderr": 0.004969160917379657 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.017867695938429774, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.017867695938429774 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135778, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135778 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 
0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.034815208033673474, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.034815208033673474 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5522935779816514, + "acc_stderr": 0.021319754962425462, + "acc_norm": 0.5522935779816514, + "acc_norm_stderr": 0.021319754962425462 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562605, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.01967580813528151, + "acc_norm": 0.3839869281045752, + 
"acc_norm_stderr": 0.01967580813528151 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125478, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125478 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3513689700130378, + "acc_stderr": 0.01219296945748402, + "acc_norm": 0.3513689700130378, + "acc_norm_stderr": 0.01219296945748402 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.44114088993275297, + "mc2_stderr": 0.015165075535391745 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38961038961038963, + "acc_stderr": 0.0167661616718935, + "acc_norm": 0.46162927981109797, + "acc_norm_stderr": 0.01713966022184556 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Kosy-platypus2-13B-v3", + "model_sha": "221e5e31480c06f46c707f92ea261bb2903729f2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Kosy-platypus2-13B-v4/result_2023-10-28 18:01:16.json b/kyujinpy/Kosy-platypus2-13B-v4/result_2023-10-28 18:01:16.json new file mode 100644 index 0000000000000000000000000000000000000000..30f385d87060b79b8f1ae7b272b40f300861ccd4 --- /dev/null +++ b/kyujinpy/Kosy-platypus2-13B-v4/result_2023-10-28 18:01:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042194, + "acc_norm": 0.42918088737201365, + "acc_norm_stderr": 0.014464085894870657 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4082852021509659, + "acc_stderr": 0.00490511903984946, + "acc_norm": 0.5448117904799841, + "acc_norm_stderr": 0.00496970108106838 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.017862091778507852, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.017862091778507852 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + 
"acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.02834504586484063, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.02834504586484063 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686854, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686854 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.046166311118017146, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.046166311118017146 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540643, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540643 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.035161847729521654, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.035161847729521654 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376896, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376896 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138936, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138936 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.042663394431593955, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.042663394431593955 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176853, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176853 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010071, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010071 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983583, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983583 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.03195514741370672, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.03195514741370672 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.363754889178618, + "acc_stderr": 0.012286991879902887, + "acc_norm": 0.363754889178618, + "acc_norm_stderr": 0.012286991879902887 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03495624522015477, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.03495624522015477 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237272, + "mc2": 0.4299526725985081, + "mc2_stderr": 0.015097005552664109 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3825265643447462, + "acc_stderr": 0.01670916538722882, + "acc_norm": 0.41204250295159384, + "acc_norm_stderr": 0.01692227673852836 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, 
+ "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Kosy-platypus2-13B-v4", + "model_sha": "f7f8972f4e1221436272e0d16b946c816373ce93", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Kosy-platypus2-13B-v5/result_2023-11-01 20:41:13.json b/kyujinpy/Kosy-platypus2-13B-v5/result_2023-11-01 20:41:13.json new file mode 100644 index 0000000000000000000000000000000000000000..ebeed6abaa595fd5db7850dd4b649c415900107d --- /dev/null +++ b/kyujinpy/Kosy-platypus2-13B-v5/result_2023-11-01 20:41:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.01411129875167495, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.014471133392642463 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4063931487751444, + "acc_stderr": 0.004901558132335531, + "acc_norm": 0.5361481776538538, + "acc_norm_stderr": 0.004976724124850573 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.03834234744164993 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.017875748840242407, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.017875748840242407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685516, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685516 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.043171711948702535, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.043171711948702535 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.040434618619167466, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.040434618619167466 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03196876989195779, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03196876989195779 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + 
"acc_stderr": 0.028343787250540646, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540646 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.02983280811479601, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.02983280811479601 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.02763490726417854, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.02763490726417854 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.0368122963339432, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.0368122963339432 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.02333065405453589, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.02333065405453589 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138938, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138938 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881688, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881688 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 
0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424523, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424523 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536671, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536671 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.01941253924203216, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.01941253924203216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611313, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611313 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289804, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.378748370273794, + "acc_stderr": 0.012389052105003734, + "acc_norm": 0.378748370273794, + "acc_norm_stderr": 0.012389052105003734 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476196, + "mc2": 0.4346617472360019, + "mc2_stderr": 0.015047283841012499 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3754427390791027, + "acc_stderr": 
0.016648411589511084, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.017072525875563103 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Kosy-platypus2-13B-v5", + "model_sha": "1a82ca82d6bb7b00b7318dc21f431f8c15fca3bc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Mistral-7B-Ko-v2/result_2023-11-17 02:55:02.json b/kyujinpy/Mistral-7B-Ko-v2/result_2023-11-17 02:55:02.json new file mode 100644 index 0000000000000000000000000000000000000000..b807c653bbc80a57a3210718d8d6fad1cc6bee4b --- /dev/null +++ b/kyujinpy/Mistral-7B-Ko-v2/result_2023-11-17 02:55:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33276450511945393, + 
"acc_stderr": 0.013769863046192304, + "acc_norm": 0.36689419795221845, + "acc_norm_stderr": 0.014084133118104296 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3646683927504481, + "acc_stderr": 0.004803533333364229, + "acc_norm": 0.4689304919338777, + "acc_norm_stderr": 0.004980138679161039 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44572158365261816, + "acc_stderr": 0.017774297282479506, + "acc_norm": 0.44572158365261816, + "acc_norm_stderr": 0.017774297282479506 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079023, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.02804339985821063, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.02804339985821063 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413925, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413925 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767762, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767762 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + 
"acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019413, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019413 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3433962264150943, + "acc_stderr": 0.029224526469124792, + "acc_norm": 0.3433962264150943, + "acc_norm_stderr": 0.029224526469124792 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815642, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815642 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02459497512892094, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02459497512892094 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03942082639927213, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03942082639927213 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456602, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456602 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144807, + "acc_norm": 0.43005181347150256, + 
"acc_norm_stderr": 0.03572954333144807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070435, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070435 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44036697247706424, + "acc_stderr": 0.02128431062376155, + "acc_norm": 0.44036697247706424, + "acc_norm_stderr": 0.02128431062376155 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626564, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626564 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.019139943748487022, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.019139943748487022 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808848, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808848 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254184, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254184 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089166, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089166 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31029986962190353, + "acc_stderr": 0.011815439293469813, + "acc_norm": 0.31029986962190353, + "acc_norm_stderr": 0.011815439293469813 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923393, + "acc_norm": 
0.2647058823529412, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608746, + "mc2": 0.429618345662767, + "mc2_stderr": 0.015308199749335972 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3577331759149941, + "acc_stderr": 0.016479808935749976, + "acc_norm": 0.4935064935064935, + "acc_norm_stderr": 0.017188904359077304 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Mistral-7B-Ko-v2", + "model_sha": "7a9974a87cb1ec441eb64b1bde9a4ab2ad76db4c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Mistral-7B-Ko-v3/result_2023-11-24 08:51:45.json b/kyujinpy/Mistral-7B-Ko-v3/result_2023-11-24 08:51:45.json new file mode 100644 index 0000000000000000000000000000000000000000..efdeb727b0845c3b160e1b32288db7c6547b3604 --- /dev/null +++ b/kyujinpy/Mistral-7B-Ko-v3/result_2023-11-24 08:51:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.01389693846114568, + "acc_norm": 0.3873720136518771, + "acc_norm_stderr": 0.014235872487909874 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3736307508464449, + "acc_stderr": 0.00482778628907485, + "acc_norm": 0.48615813582951606, + "acc_norm_stderr": 0.004987868988629998 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.017865944827291612, + "acc_norm": 0.48020434227330777, + "acc_norm_stderr": 0.017865944827291612 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 
0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.028447965476231022, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.028447965476231022 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.029443169323031537, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.029443169323031537 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159784, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159784 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717861, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717861 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.02677299065336182, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.02677299065336182 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.021436998359765324, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.021436998359765324 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.02862747055055606, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.02862747055055606 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094597, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094597 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293647, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293647 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010088, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010088 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159706, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159706 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.03195514741370672, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.03195514741370672 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3396349413298566, + "acc_stderr": 0.01209559250693197, + "acc_norm": 0.3396349413298566, + "acc_norm_stderr": 0.01209559250693197 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627877, + "mc2": 0.43927914817995606, + "mc2_stderr": 0.015458133669329948 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190192, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Mistral-7B-Ko-v3", + "model_sha": "d55ba861816137dd858e44d1db4e4dcefae09f55", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Mistral-7B-ko-v1/result_2023-11-17 02:55:15.json b/kyujinpy/Mistral-7B-ko-v1/result_2023-11-17 02:55:15.json new file mode 100644 index 0000000000000000000000000000000000000000..08d88083069ed8ca5be65b5487de48ba996e1d58 --- /dev/null +++ b/kyujinpy/Mistral-7B-ko-v1/result_2023-11-17 02:55:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635473, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 0.014150631435111728 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3654650468034256, + "acc_stderr": 0.004805761513803415, + "acc_norm": 0.4763991236805417, + "acc_norm_stderr": 0.004984219681732662 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.01775880053421442, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.01775880053421442 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079023, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.032284106267163895, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017827, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017827 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942645, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696525, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699954, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699954 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.027002521034516478, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.027002521034516478 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.0358701498607566 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46605504587155966, + "acc_stderr": 0.02138786335035399, + "acc_norm": 0.46605504587155966, + "acc_norm_stderr": 0.02138786335035399 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423556, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423556 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.01945076843250551, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.01945076843250551 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100998, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100998 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761968, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761968 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + 
"acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.34285714285714286, + "acc_stderr": 0.030387262919547735, + "acc_norm": 0.34285714285714286, + "acc_norm_stderr": 0.030387262919547735 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.012043812655846146, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.012043812655846146 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 0.03742597043806585, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.03742597043806585 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834553, + "mc2": 0.43674349953921776, + "mc2_stderr": 0.015557097313851508 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49940968122786306, + "acc_stderr": 0.01719034212344865, + "acc_norm": 0.5796930342384888, + "acc_norm_stderr": 0.016970598281177713 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Mistral-7B-ko-v1", + "model_sha": "7e4aaac38b8c44fca2cf9b90d82504dbf1b6b66c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Mistral-koplatypus-v1/result_2023-11-08 15:04:52.json b/kyujinpy/Mistral-koplatypus-v1/result_2023-11-08 15:04:52.json new file mode 100644 index 0000000000000000000000000000000000000000..964e09b1cee690b2b795d0de440552b2dd5b61a3 --- /dev/null +++ b/kyujinpy/Mistral-koplatypus-v1/result_2023-11-08 15:04:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3378839590443686, + "acc_stderr": 0.013822047922283507, + "acc_norm": 0.3771331058020478, + "acc_norm_stderr": 0.014163366896192587 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3685520812587134, + "acc_stderr": 0.004814261966376847, + "acc_norm": 0.4778928500298745, + "acc_norm_stderr": 0.004984901752846396 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4623243933588761, + "acc_stderr": 0.017829131764287198, + "acc_norm": 0.4623243933588761, + "acc_norm_stderr": 0.017829131764287198 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223264, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + 
"acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674064, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674064 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557835, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.030052580579557835 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360385, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360385 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.03567603799639171, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.03567603799639171 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + 
"acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272436, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + "acc_stderr": 0.02143295620345332, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.02143295620345332 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483184, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483184 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.020007912739359365, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.020007912739359365 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878634, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608042, + 
"acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608042 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220501, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220501 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.02902942281568141, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.02902942281568141 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163908, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163908 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37614080834419816, + "acc_stderr": 0.012372214430599816, + "acc_norm": 0.37614080834419816, + "acc_norm_stderr": 0.012372214430599816 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627877, + "mc2": 0.45967145819252797, + "mc2_stderr": 0.015531270159359699 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36481700118063753, + "acc_stderr": 0.016550144337046595, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.016977101932601525 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Mistral-koplatypus-v1", + "model_sha": "520c485adecb47aefb23b0b3f5fb2240886651d8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/ko-platypus-kiwi-13B/result_2023-11-14 12:30:19.json b/kyujinpy/ko-platypus-kiwi-13B/result_2023-11-14 12:30:19.json new file mode 100644 index 0000000000000000000000000000000000000000..13825c7be3bbbc80686ea985e759c7b156a2bddd --- /dev/null +++ b/kyujinpy/ko-platypus-kiwi-13B/result_2023-11-14 12:30:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000324, + "acc_norm": 0.42406143344709896, + "acc_norm_stderr": 0.014441889627464396 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40888269269069905, + "acc_stderr": 0.004906227902850752, + "acc_norm": 0.5429197371041625, + "acc_norm_stderr": 0.0049713640310625916 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633938, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633938 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.030135906478517563, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 0.030135906478517563 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 
0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.025174048384000766, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.025174048384000766 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 
0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523867, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523867 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377906, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5027522935779817, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.5027522935779817, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.038522733649243135, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.038522733649243135 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355435, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355435 
+ }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861131, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861131 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012404, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012404 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.02806499816704009, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.02806499816704009 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.303129074315515, + "acc_stderr": 0.011738669951254296, + "acc_norm": 0.303129074315515, + "acc_norm_stderr": 0.011738669951254296 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.039025510073744475, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.039025510073744475 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707689, + "mc2": 0.4004845005349835, + "mc2_stderr": 0.014923471142092035 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5655253837072018, + "acc_stderr": 0.017042098620824935, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.01627295299701915 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/ko-platypus-kiwi-13B", + "model_sha": "069a1dd610e02969baaecbe54305a431e6e18d23", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/kosy-openorca/result_2023-11-03 06:06:41.json b/kyujinpy/kosy-openorca/result_2023-11-03 06:06:41.json new file mode 100644 index 0000000000000000000000000000000000000000..ec3ed9a33eec28523302a0f4fdc6bc337db1f9ba --- /dev/null +++ b/kyujinpy/kosy-openorca/result_2023-11-03 06:06:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256512 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40838478390758814, + "acc_stderr": 0.004905304371090869, + "acc_norm": 0.5449113722366062, + "acc_norm_stderr": 0.0049696115546853945 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5197956577266922, + "acc_stderr": 0.01786594482729163, + "acc_norm": 0.5197956577266922, + "acc_norm_stderr": 0.01786594482729163 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 
+ }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.03163145807552378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.0332085274234831, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.0332085274234831 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.028071588901091852, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.028071588901091852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + 
"acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696545, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696545 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.026830805998952243, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.026830805998952243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + 
"acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.018999707383162662, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.018999707383162662 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605607, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605607 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.02643132987078955, + "acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.02643132987078955 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935893, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935893 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271808, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271808 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.016058999026100626, + "mc2": 0.45297396150774194, + "mc2_stderr": 0.015202543307381022 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49586776859504134, + "acc_stderr": 0.017189767032130824, + "acc_norm": 0.5879574970484062, + "acc_norm_stderr": 0.01692227673852836 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/kosy-openorca", + "model_sha": "52c7495d1e211ac32c5e383418f3c1019c8883e1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/mistral-Ko-Orca-7B/result_2023-11-01 14:00:13.json b/kyujinpy/mistral-Ko-Orca-7B/result_2023-11-01 14:00:13.json new file mode 100644 index 0000000000000000000000000000000000000000..25f088a53963c82fa6e43c6e3303ed20ecc44411 --- /dev/null +++ b/kyujinpy/mistral-Ko-Orca-7B/result_2023-11-01 14:00:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3319112627986348, + "acc_stderr": 0.013760988200880534, + "acc_norm": 0.3720136518771331, + "acc_norm_stderr": 0.014124597881844456 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36885082652858, + "acc_stderr": 0.0048150733340005985, + "acc_norm": 0.47689703246365267, + "acc_norm_stderr": 0.004984452002563923 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 
0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4355044699872286, + "acc_stderr": 0.01773058992792661, + "acc_norm": 0.4355044699872286, + "acc_norm_stderr": 0.01773058992792661 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.031778212502369216, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.031778212502369216 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330313, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330313 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.039609335494512087, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.039609335494512087 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03196876989195779, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03196876989195779 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.024915243985987833, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.024915243985987833 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.027906150826041136, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.027906150826041136 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159665, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159665 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523864, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357334, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.035780381650085846, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.035780381650085846 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824096, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824096 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33169934640522875, + "acc_stderr": 0.01904748523936038, + "acc_norm": 0.33169934640522875, + "acc_norm_stderr": 0.01904748523936038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669276, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669276 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.03253302807877738, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.03253302807877738 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271805, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271805 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.01559475363200651, + "mc2": 0.434406639546979, + "mc2_stderr": 0.01542967405561873 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.5655253837072018, + "acc_norm_stderr": 0.01704209862082493 + } + }, + "versions": { + 
"all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/mistral-Ko-Orca-7B", + "model_sha": "9658ad38439195153512b6b7117f94000d8f5c3a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lIlBrother/llama2-merge-v0.1/result_2023-11-10 13:59:02.json b/lIlBrother/llama2-merge-v0.1/result_2023-11-10 13:59:02.json new file mode 100644 index 0000000000000000000000000000000000000000..fdb0c03f34288cef2ceed242814e8318768b9738 --- /dev/null +++ b/lIlBrother/llama2-merge-v0.1/result_2023-11-10 13:59:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4104095563139932, + "acc_stderr": 0.014374922192642662, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.014575583922019672 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.44054969129655447, + "acc_stderr": 0.004954384702021653, + "acc_norm": 0.5907189802828122, + "acc_norm_stderr": 0.004906962980328293 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394216, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540218, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540218 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5562700964630225, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.5562700964630225, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.02517404838400076, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.02517404838400076 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + 
"acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575494, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575494 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.02834378725054063, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.02834378725054063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009794, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009794 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.037786210790920545, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.037786210790920545 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5154320987654321, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.5154320987654321, + "acc_norm_stderr": 0.0278074900442762 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 
0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5522935779816514, + "acc_stderr": 0.02131975496242546, + "acc_norm": 0.5522935779816514, + "acc_norm_stderr": 0.02131975496242546 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.0200176292142131, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.0200176292142131 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042394, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042394 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898428, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898428 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.03181425118197787, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.03181425118197787 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610798, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610798 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3604954367666232, + "acc_stderr": 0.01226311023729924, + "acc_norm": 0.3604954367666232, + "acc_norm_stderr": 0.01226311023729924 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 
0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3353733170134639, + "mc1_stderr": 0.016527534039668987, + "mc2": 0.5055997170755017, + "mc2_stderr": 0.015472709251284784 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4734356552538371, + "acc_stderr": 0.017166075717577747, + "acc_norm": 0.5312868949232585, + "acc_norm_stderr": 0.017156666859785473 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lIlBrother/llama2-merge-v0.1", + "model_sha": "7c5ff11a49acb01a0b030ae244509c224dd2377e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lIlBrother/llama2-merge-v0.2/result_2023-11-10 14:27:07.json 
b/lIlBrother/llama2-merge-v0.2/result_2023-11-10 14:27:07.json new file mode 100644 index 0000000000000000000000000000000000000000..d696128709bdaf6358c3d87cf135042d92ad3f92 --- /dev/null +++ b/lIlBrother/llama2-merge-v0.2/result_2023-11-10 14:27:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.014383915302225398, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.014575583922019672 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4404501095399323, + "acc_stderr": 0.004954265595373461, + "acc_norm": 0.5911173073093009, + "acc_norm_stderr": 0.004906227902850757 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.037792759455032014, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.037792759455032014 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394216, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.032025630761017346, + "acc_norm": 0.4, + "acc_norm_stderr": 0.032025630761017346 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540218, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540218 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5562700964630225, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.5562700964630225, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + 
"acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.02834378725054063, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.02834378725054063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009794, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009794 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.037786210790920545, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.037786210790920545 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": 
{ + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5522935779816514, + "acc_stderr": 0.02131975496242546, + "acc_norm": 0.5522935779816514, + "acc_norm_stderr": 0.02131975496242546 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.0200176292142131, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.0200176292142131 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042394, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042394 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898428, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898428 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.03181425118197787, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.03181425118197787 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.03078154910202622, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.03078154910202622 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.36310299869621904, + "acc_stderr": 0.012282264406018765, + "acc_norm": 0.36310299869621904, + "acc_norm_stderr": 0.012282264406018765 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3353733170134639, + "mc1_stderr": 0.016527534039668987, + "mc2": 0.5056596305681444, + "mc2_stderr": 0.015473471074051047 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4734356552538371, + "acc_stderr": 0.017166075717577747, + "acc_norm": 0.5301062573789846, + "acc_norm_stderr": 0.01715916359017022 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lIlBrother/llama2-merge-v0.2", + "model_sha": "570a429fcf20bc0af28daf1286bc45d1829f5122", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lIlBrother/llama2-merge-v0.3/result_2023-11-11 02:04:09.json b/lIlBrother/llama2-merge-v0.3/result_2023-11-11 02:04:09.json new file mode 100644 index 0000000000000000000000000000000000000000..27792e8d137d80573ced55959bd547d23daeb547 --- /dev/null +++ b/lIlBrother/llama2-merge-v0.3/result_2023-11-11 02:04:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3873720136518771, + "acc_stderr": 0.014235872487909869, + "acc_norm": 0.4257679180887372, + "acc_norm_stderr": 0.014449464278868803 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43616809400517825, + "acc_stderr": 0.004948952519517522, + "acc_norm": 0.5819557857000598, + "acc_norm_stderr": 0.004922294797766665 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.017747874245683602, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.017747874245683602 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + 
"acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.02524277098712617, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.02524277098712617 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540632, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540632 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.03058805297427065, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.03058805297427065 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730575, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.026772990653361816, + "acc_norm": 
0.5520231213872833, + "acc_norm_stderr": 0.026772990653361816 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.021162420048273515, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.021162420048273515 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536671, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536671 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401164, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401164 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293648, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293648 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776125, + "acc_norm": 0.3786764705882353, + 
"acc_norm_stderr": 0.029465133639776125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3378212974296206, + "mc1_stderr": 0.016557167322516893, + "mc2": 0.506693694303165, + "mc2_stderr": 0.015539179895065392 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4722550177095632, + "acc_stderr": 0.01716386797945601, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.017161563949916345 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lIlBrother/llama2-merge-v0.3", + "model_sha": "9051a05341dbfb26b4a83210c8fb6d72b6b64bca", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lIlBrother/llama2-merge-v0.4/result_2023-12-04 12:04:17.json b/lIlBrother/llama2-merge-v0.4/result_2023-12-04 12:04:17.json new file mode 100644 index 0000000000000000000000000000000000000000..bb631d0ac5b8a364df993e23487770c5777930a1 --- /dev/null +++ b/lIlBrother/llama2-merge-v0.4/result_2023-12-04 12:04:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41552901023890787, + "acc_stderr": 0.014401366641216376, + "acc_norm": 0.4786689419795222, + "acc_norm_stderr": 0.014598087973127106 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4296952798247361, + "acc_stderr": 0.0049402086413720785, + "acc_norm": 0.5809599681338379, + "acc_norm_stderr": 0.004923935749842495 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.01775880053421441, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.01775880053421441 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.035094383488796295, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.035094383488796295 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.031342504862454025, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.031342504862454025 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871927, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871927 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778657, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778657 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972592, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972592 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323674, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323674 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401164, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401164 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611317, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + 
"acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.02985526139348393, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.02985526139348393 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3741851368970013, + "acc_stderr": 0.012359335618172063, + "acc_norm": 0.3741851368970013, + "acc_norm_stderr": 0.012359335618172063 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.44672046780889485, + "mc2_stderr": 0.01512449543014224 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46162927981109797, + "acc_stderr": 0.01713966022184556, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lIlBrother/llama2-merge-v0.4", + "model_sha": "71c4402544b11eef6ae4a156fe79a452f3c9db53", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama2-ko-chang-13b-instruct-chat/result_2023-11-08 22:04:06.json b/lcw99/llama2-ko-chang-13b-instruct-chat/result_2023-11-08 22:04:06.json new file mode 100644 index 0000000000000000000000000000000000000000..063bb5d99e47ec05b342aa9dbd006af36d83d04e --- /dev/null +++ b/lcw99/llama2-ko-chang-13b-instruct-chat/result_2023-11-08 22:04:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938215, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.014575583922019677 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4447321250746863, + "acc_stderr": 0.004959204773046197, + "acc_norm": 0.5995817566221868, + "acc_norm_stderr": 0.004889817489739683 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5325670498084292, + "acc_stderr": 0.01784199575052087, + "acc_norm": 0.5325670498084292, + "acc_norm_stderr": 0.01784199575052087 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + 
"acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.036186648199362466, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.036186648199362466 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03255326307272486, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03255326307272486 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3849056603773585, + "acc_stderr": 0.02994649856769995, + "acc_norm": 0.3849056603773585, + "acc_norm_stderr": 0.02994649856769995 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.47761194029850745, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.47761194029850745, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 
0.037242495958177295, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.021432956203453327, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.021432956203453327 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790606, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.019576953122088844, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.019576953122088844 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 
0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03054674526495319, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03054674526495319 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144696, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144696 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048228, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048228 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559696, + "mc2": 0.47797395322509245, + "mc2_stderr": 0.015295300677969451 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3919716646989374, + "acc_stderr": 0.016784332119424088, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.017182864434998564 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama2-ko-chang-13b-instruct-chat", + "model_sha": "50d21acccdfed4780c8f38892ae3a7dc30bf02b6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama2-ko-chang-13b-instruct-chat/result_2023-11-13 02:04:55.json b/lcw99/llama2-ko-chang-13b-instruct-chat/result_2023-11-13 02:04:55.json new file mode 100644 index 0000000000000000000000000000000000000000..15186f8a920ebe8d6d86066d074f9a4dd4e8ef5c --- /dev/null +++ b/lcw99/llama2-ko-chang-13b-instruct-chat/result_2023-11-13 02:04:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938218, + "acc_norm": 0.4667235494880546, + "acc_norm_stderr": 0.014578995859605818 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4404501095399323, + "acc_stderr": 0.004954265595373462, + "acc_norm": 0.5983867755427206, + "acc_norm_stderr": 0.004892226011836585 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.545338441890166, + "acc_stderr": 0.0178063045850526, + "acc_norm": 0.545338441890166, + "acc_norm_stderr": 0.0178063045850526 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 
0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.036186648199362466, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.036186648199362466 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.02506909438729654, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.02506909438729654 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019413, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019413 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5726495726495726, + "acc_stderr": 0.03240847393516327, + "acc_norm": 0.5726495726495726, + "acc_norm_stderr": 0.03240847393516327 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230165, + 
"acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230165 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101813, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101813 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353985, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353985 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.01955964680921593, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.01955964680921593 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298804, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553976, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553976 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763126, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763126 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.011773980329380726, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.011773980329380726 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.016451264440068246, + "mc2": 0.4970299025244721, + "mc2_stderr": 0.01555960496501192 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43683589138134593, + "acc_stderr": 0.01705263355985608, + "acc_norm": 0.5076741440377804, + "acc_norm_stderr": 0.017188329219654273 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 
1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama2-ko-chang-13b-instruct-chat", + "model_sha": "7eea2a6e0ff1251e701daf9171d72790e7b54c68", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-15 20:02:06.json b/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-15 20:02:06.json new file mode 100644 index 0000000000000000000000000000000000000000..cecac7d25ac732a742eb840c0d73dc96186d790d --- /dev/null +++ b/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-15 20:02:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.013888816286782114, + "acc_norm": 0.39419795221843, + "acc_norm_stderr": 0.014280522667467327 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39852619000199163, + "acc_stderr": 0.004885942040894556, + "acc_norm": 0.5248954391555467, + "acc_norm_stderr": 0.0049835924109341715 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37292464878671777, + "acc_stderr": 0.017292868269453924, + "acc_norm": 
0.37292464878671777, + "acc_norm_stderr": 0.017292868269453924 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742399, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488558, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488558 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786751, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786751 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341926, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.028510251512341926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371372, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371372 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22660098522167488, + "acc_stderr": 0.029454863835292982, + "acc_norm": 0.22660098522167488, + "acc_norm_stderr": 0.029454863835292982 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.025189006660212385, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.025189006660212385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.03187195347942466 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.02700876609070809, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.02700876609070809 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275794, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275794 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.35323383084577115, + "acc_stderr": 0.03379790611796776, + "acc_norm": 0.35323383084577115, + "acc_norm_stderr": 0.03379790611796776 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.033450369167889904, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.033450369167889904 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.020842290930114662, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.020842290930114662 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153186, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153186 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.02640614597362566, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.02640614597362566 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.033088185944157494, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.033088185944157494 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29357798165137616, + "acc_stderr": 0.019525151122639667, + "acc_norm": 0.29357798165137616, + "acc_norm_stderr": 0.019525151122639667 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.026925654653615693, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.026925654653615693 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.22685185185185186, + "acc_stderr": 0.028561650102422273, + "acc_norm": 0.22685185185185186, + "acc_norm_stderr": 0.028561650102422273 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.027971541370170605, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.027971541370170605 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.273142112125163, + "acc_stderr": 0.01138015056783041, + "acc_norm": 0.273142112125163, + "acc_norm_stderr": 0.01138015056783041 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03283472056108567, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03283472056108567 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268048, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268048 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842885, + "mc2": 0.4212326635036667, + "mc2_stderr": 0.015192123492522393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3069657615112161, + "acc_stderr": 0.01585758809536281, + "acc_norm": 0.4025974025974026, + "acc_norm_stderr": 0.01686102048640779 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama2-ko-chang-instruct-chat", + "model_sha": "eaba470f33eb377cb27696dbc1f9a76fc03d4fe3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama2-ko-chang-instruct-chat/result_2023-11-06 22:33:48.json b/lcw99/llama2-ko-chang-instruct-chat/result_2023-11-06 22:33:48.json new file mode 100644 index 0000000000000000000000000000000000000000..7e2beca96f83f85257f61e54784f5a42d265c647 --- /dev/null +++ b/lcw99/llama2-ko-chang-instruct-chat/result_2023-11-06 22:33:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.013888816286782114, + "acc_norm": 0.39419795221843, + "acc_norm_stderr": 0.014280522667467327 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39852619000199163, + "acc_stderr": 0.004885942040894556, + "acc_norm": 0.5248954391555467, + 
"acc_norm_stderr": 0.0049835924109341715 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37292464878671777, + "acc_stderr": 0.017292868269453924, + "acc_norm": 0.37292464878671777, + "acc_norm_stderr": 0.017292868269453924 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742399, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488558, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488558 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786751, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786751 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341926, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.028510251512341926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371372, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371372 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22660098522167488, + "acc_stderr": 0.029454863835292982, + "acc_norm": 0.22660098522167488, + "acc_norm_stderr": 0.029454863835292982 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.025189006660212385, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.025189006660212385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.02700876609070809, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.02700876609070809 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275794, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275794 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.35323383084577115, + "acc_stderr": 0.03379790611796776, + "acc_norm": 0.35323383084577115, + "acc_norm_stderr": 0.03379790611796776 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.033450369167889904, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.033450369167889904 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.020842290930114662, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.020842290930114662 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153186, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153186 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.02640614597362566, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.02640614597362566 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.033088185944157494, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.033088185944157494 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29357798165137616, + "acc_stderr": 0.019525151122639667, + "acc_norm": 0.29357798165137616, + "acc_norm_stderr": 0.019525151122639667 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.026925654653615693, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.026925654653615693 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.22685185185185186, + "acc_stderr": 0.028561650102422273, + "acc_norm": 0.22685185185185186, + "acc_norm_stderr": 0.028561650102422273 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.027971541370170605, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.027971541370170605 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.273142112125163, + "acc_stderr": 0.01138015056783041, + "acc_norm": 0.273142112125163, + "acc_norm_stderr": 0.01138015056783041 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03283472056108567, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03283472056108567 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268048, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 
0.03567969772268048 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842885, + "mc2": 0.4212326635036667, + "mc2_stderr": 0.015192123492522393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3069657615112161, + "acc_stderr": 0.01585758809536281, + "acc_norm": 0.4025974025974026, + "acc_norm_stderr": 0.01686102048640779 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama2-ko-chang-instruct-chat", + "model_sha": "034f986b57b4746cbc6332ac14ff7f0041b66ba3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/polyglot-ko-12.8b-chang-instruct-chat/result_2023-09-27 08:17:24.json 
b/lcw99/polyglot-ko-12.8b-chang-instruct-chat/result_2023-09-27 08:17:24.json new file mode 100644 index 0000000000000000000000000000000000000000..be5563e1e0d50ee09531b351e3d559de2ffa8ecd --- /dev/null +++ b/lcw99/polyglot-ko-12.8b-chang-instruct-chat/result_2023-09-27 08:17:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2986348122866894, + "acc_stderr": 0.013374078615068756, + "acc_norm": 0.34897610921501704, + "acc_norm_stderr": 0.013928933461382497 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4010157339175463, + "acc_stderr": 0.004891025533633027, + "acc_norm": 0.527185819557857, + "acc_norm_stderr": 0.004982400368939667 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393161, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393161 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26947637292464877, + "acc_stderr": 0.01586624307321506, + "acc_norm": 0.26947637292464877, + "acc_norm_stderr": 0.01586624307321506 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.0281854413012341, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.0281854413012341 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.033293941190735296, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.033293941190735296 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3183279742765273, + "acc_stderr": 0.026457225067811025, + "acc_norm": 0.3183279742765273, + "acc_norm_stderr": 0.026457225067811025 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438015, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438015 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171451, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171451 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.02684151432295893, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.02684151432295893 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 
0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094631, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094631 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.025189006660212385, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.025189006660212385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899105, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878285, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878285 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.20398009950248755, + "acc_stderr": 0.02849317624532609, + "acc_norm": 0.20398009950248755, + "acc_norm_stderr": 0.02849317624532609 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.03214737302029469, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.03214737302029469 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730575, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.036845294917747094, + "acc_norm": 0.16, + "acc_norm_stderr": 0.036845294917747094 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.02361867831006937, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.02361867831006937 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.02508947852376513, + "acc_norm": 0.2839506172839506, + 
"acc_norm_stderr": 0.02508947852376513 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24587155963302754, + "acc_stderr": 0.018461940968708457, + "acc_norm": 0.24587155963302754, + "acc_norm_stderr": 0.018461940968708457 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102148, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102148 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.024848018263875195, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.024848018263875195 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.017740899509177788, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.017740899509177788 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.19148936170212766, + "acc_stderr": 0.023472645247949425, + "acc_norm": 0.19148936170212766, + "acc_norm_stderr": 0.023472645247949425 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093936, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093936 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225606, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225606 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625162, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625162 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.28270042194092826, + "acc_stderr": 0.029312814153955914, + "acc_norm": 
0.28270042194092826, + "acc_norm_stderr": 0.029312814153955914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2542372881355932, + "acc_stderr": 0.011121129007840664, + "acc_norm": 0.2542372881355932, + "acc_norm_stderr": 0.011121129007840664 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.01578537085839671, + "mc2": 0.4444330897605926, + "mc2_stderr": 0.015483222855074748 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2798110979929162, + "acc_stderr": 0.015433715795427764, + "acc_norm": 0.35182998819362454, + "acc_norm_stderr": 0.016418206451218057 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/polyglot-ko-12.8b-chang-instruct-chat", + "model_sha": "a16de096eb135e66b90314e5ab84116c9f0f9d1b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/zephykor-ko-7b-chang/result_2023-11-01 10:28:02.json b/lcw99/zephykor-ko-7b-chang/result_2023-11-01 10:28:02.json new file mode 100644 index 0000000000000000000000000000000000000000..c4c55b2fe7b77056347782457118eb319a418885 --- /dev/null +++ b/lcw99/zephykor-ko-7b-chang/result_2023-11-01 10:28:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32764505119453924, + "acc_stderr": 0.013715847940719344, + "acc_norm": 0.3728668941979522, + "acc_norm_stderr": 0.014131176760131158 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3679545907189803, + "acc_stderr": 0.004812633280078263, + "acc_norm": 0.48376817367058356, + "acc_norm_stderr": 0.004987151381091178 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.036602988340491624, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.036602988340491624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236923, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236923 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.028013651891995072, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.028013651891995072 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 
0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3487179487179487, + "acc_stderr": 0.024162780284017717, + "acc_norm": 0.3487179487179487, + "acc_norm_stderr": 0.024162780284017717 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.027666182075539635, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.027666182075539635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5641025641025641, + "acc_stderr": 0.03248577511578401, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.03248577511578401 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815632, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815632 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.472636815920398, + "acc_stderr": 0.03530235517334682, + "acc_norm": 0.472636815920398, + "acc_norm_stderr": 0.03530235517334682 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112136, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112136 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 
0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3786127167630058, + "acc_stderr": 0.026113749361310338, + "acc_norm": 0.3786127167630058, + "acc_norm_stderr": 0.026113749361310338 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.037311335196738925, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.037311335196738925 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.027163686038271226, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.027163686038271226 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557673, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4, + "acc_stderr": 0.02100420126042007, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02100420126042007 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4628099173553719, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.4628099173553719, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926606, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926606 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34477124183006536, + "acc_stderr": 0.019228322018696644, + "acc_norm": 0.34477124183006536, + "acc_norm_stderr": 0.019228322018696644 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2245810055865922, + "acc_stderr": 0.013956803666544636, + "acc_norm": 0.2245810055865922, + "acc_norm_stderr": 0.013956803666544636 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 
0.029097209568411966, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411966 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.030472526026726503, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.030472526026726503 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30834419817470665, + "acc_stderr": 0.011794833789715322, + "acc_norm": 0.30834419817470665, + "acc_norm_stderr": 0.011794833789715322 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.49077303683687423, + "mc2_stderr": 0.015584509571305388 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.01547327158398843, + "acc_norm": 0.37662337662337664, + "acc_norm_stderr": 0.016658799874051968 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/zephykor-ko-7b-chang", + "model_sha": "417731f0f84b698065589bb915528f30040cd23b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/zephykor-ko-beta-7b-chang/result_2023-11-27 00:57:43.json b/lcw99/zephykor-ko-beta-7b-chang/result_2023-11-27 00:57:43.json new file mode 100644 index 0000000000000000000000000000000000000000..33860b54eb668b74d04cfd01c372cac31275559e --- /dev/null +++ b/lcw99/zephykor-ko-beta-7b-chang/result_2023-11-27 00:57:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31399317406143346, + "acc_stderr": 0.013562691224726288, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759091 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3684524995020912, + "acc_stderr": 0.004813991069808272, + "acc_norm": 0.4765982871937861, + "acc_norm_stderr": 0.00498431320579144 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259264, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.037439798259264 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.01786594482729162, + "acc_norm": 0.48020434227330777, + "acc_norm_stderr": 0.01786594482729162 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368878, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368878 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596239, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596239 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3954983922829582, + "acc_stderr": 0.027770918531427834, + "acc_norm": 0.3954983922829582, + "acc_norm_stderr": 0.027770918531427834 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 
0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.034648816750163375, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.034648816750163375 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.028942004040998164, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.028942004040998164 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3, + "acc_stderr": 0.0232345810884285, + "acc_norm": 0.3, + "acc_norm_stderr": 0.0232345810884285 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36129032258064514, + "acc_stderr": 0.027327548447957532, + "acc_norm": 0.36129032258064514, + "acc_norm_stderr": 0.027327548447957532 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.032745319388423504, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.032745319388423504 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3283018867924528, + "acc_stderr": 0.02890159361241178, + "acc_norm": 0.3283018867924528, + "acc_norm_stderr": 0.02890159361241178 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505415, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505415 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276612, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276612 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.40298507462686567, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.40298507462686567, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 
0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3786127167630058, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.3786127167630058, + "acc_norm_stderr": 0.02611374936131034 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.345679012345679, + "acc_stderr": 0.02646248777700187, + "acc_norm": 0.345679012345679, + "acc_norm_stderr": 0.02646248777700187 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3871559633027523, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.3871559633027523, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.027121956071388852, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.027121956071388852 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33169934640522875, + "acc_stderr": 0.01904748523936038, + "acc_norm": 0.33169934640522875, + "acc_norm_stderr": 0.01904748523936038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590624, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590624 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19907407407407407, + "acc_stderr": 0.027232298462690242, + "acc_norm": 0.19907407407407407, + "acc_norm_stderr": 0.027232298462690242 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29832402234636873, + "acc_stderr": 0.015301840045129267, + "acc_norm": 
0.29832402234636873, + "acc_norm_stderr": 0.015301840045129267 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.02604066247420126, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.02604066247420126 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29465449804432853, + "acc_stderr": 0.01164357676406955, + "acc_norm": 0.29465449804432853, + "acc_norm_stderr": 0.01164357676406955 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.03354092437591519, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.03354092437591519 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.0157853708583967, + "mc2": 0.48693248002205786, + "mc2_stderr": 0.01610559804856284 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2514757969303424, + "acc_stderr": 0.014916462437232242, + "acc_norm": 0.3730814639905549, + "acc_norm_stderr": 0.016627318275137432 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/zephykor-ko-beta-7b-chang", + "model_sha": "16733d9f8333702df52876b684c4927c73882b07", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-02 09:12:17.json b/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-02 09:12:17.json new file mode 100644 index 0000000000000000000000000000000000000000..178e487189aed7ac5bead2421e9cfb677c4354b9 --- /dev/null +++ b/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-02 09:12:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3267918088737201, + "acc_stderr": 0.013706665975587333, + "acc_norm": 0.39419795221843, + "acc_norm_stderr": 0.01428052266746732 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3685520812587134, + "acc_stderr": 0.004814261966376846, + "acc_norm": 0.48665604461262696, + "acc_norm_stderr": 0.004988004122536506 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.01787994891443167, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.01787994891443167 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135776, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135776 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485372, + "acc_norm": 
0.3890675241157556, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41919191919191917, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.41919191919191917, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.039417076320648906, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.039417076320648906 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.030283995525884396, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.030283995525884396 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33076923076923076, + "acc_stderr": 0.02385479568097114, + "acc_norm": 0.33076923076923076, + "acc_norm_stderr": 0.02385479568097114 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233484, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233484 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132267, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132267 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3433962264150943, + "acc_stderr": 0.029224526469124792, + "acc_norm": 0.3433962264150943, + "acc_norm_stderr": 0.029224526469124792 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.04653429807913509, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.04653429807913509 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4527363184079602, + "acc_stderr": 0.035197027175769155, + "acc_norm": 
0.4527363184079602, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.03533133389323657, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.03533133389323657 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153172, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153172 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292406, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292406 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3765432098765432, + "acc_stderr": 0.02695934451874778, + "acc_norm": 0.3765432098765432, + "acc_norm_stderr": 0.02695934451874778 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557673, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4073394495412844, + "acc_stderr": 0.021065986244412898, + "acc_norm": 0.4073394495412844, + "acc_norm_stderr": 0.021065986244412898 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3349673202614379, + "acc_stderr": 0.01909422816700031, + "acc_norm": 0.3349673202614379, + "acc_norm_stderr": 0.01909422816700031 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503782, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 
0.027187127011503782 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.0305467452649532, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.0305467452649532 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261462, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261462 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.02806499816704009, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.02806499816704009 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2966101694915254, + "acc_stderr": 0.011665946586082868, + "acc_norm": 0.2966101694915254, + "acc_norm_stderr": 0.011665946586082868 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396704, + "mc2": 0.4892217653375252, + "mc2_stderr": 0.016000110388200085 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27390791027154665, + "acc_stderr": 0.015332499474791027, + "acc_norm": 0.42266824085005905, + "acc_norm_stderr": 0.0169835060795776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/zephykor-ko-beta-7b-chang", + "model_sha": "c5c706f4042ccbcd767c157d7046beef1b9f8493", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-04 03:57:05.json b/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-04 03:57:05.json new file mode 100644 index 0000000000000000000000000000000000000000..b1388374be64cbec7aa339edab9338c696dd8454 --- /dev/null +++ b/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-04 03:57:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.013640943091946528, + "acc_norm": 0.3984641638225256, + "acc_norm_stderr": 0.014306946052735569 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3708424616610237, + "acc_stderr": 0.004820431839600025, + "acc_norm": 0.4785899223262298, + "acc_norm_stderr": 0.004985204766555068 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5019157088122606, + "acc_stderr": 0.01787983225902668, + "acc_norm": 0.5019157088122606, + "acc_norm_stderr": 0.01787983225902668 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.028043399858210628, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.028043399858210628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.03343577705583065, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.03343577705583065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.46464646464646464, + "acc_stderr": 0.03553436368828061, + "acc_norm": 0.46464646464646464, + "acc_norm_stderr": 0.03553436368828061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03038835355188685, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03038835355188685 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.03332769068410789, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.03332769068410789 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.02786932057166463, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02786932057166463 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5470085470085471, + "acc_stderr": 0.03261099873098619, + "acc_norm": 0.5470085470085471, + "acc_norm_stderr": 0.03261099873098619 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.029582245128384296, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.029582245128384296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.026483392042098177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.02723741509459248, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.02723741509459248 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42385321100917434, + "acc_stderr": 0.02118726320908754, + "acc_norm": 0.42385321100917434, + "acc_norm_stderr": 0.02118726320908754 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.044492703500683836, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.044492703500683836 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, 
+ "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.01877168389352817, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.01877168389352817 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510923, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510923 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26256983240223464, + "acc_stderr": 0.01471682427301776, + "acc_norm": 0.26256983240223464, + "acc_norm_stderr": 0.01471682427301776 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681397, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681397 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5274261603375527, + "acc_stderr": 0.03249822718301304, + "acc_norm": 0.5274261603375527, + "acc_norm_stderr": 0.03249822718301304 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29791395045632335, + "acc_stderr": 0.011680717340400049, + "acc_norm": 0.29791395045632335, + "acc_norm_stderr": 0.011680717340400049 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398395, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398395 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.49768897705057563, + "mc2_stderr": 0.01589857721446022 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2668240850059032, + "acc_stderr": 0.015206575684565904, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.017057753702160287 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/zephykor-ko-beta-7b-chang", + "model_sha": "6958b487ce529ff5114d25b1ba2accc84bf5f8a8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-25 01:20:18.json b/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-25 01:20:18.json new file mode 100644 index 0000000000000000000000000000000000000000..1e70ba5e1ea37184c2fd21e8ca39996e167b9822 --- /dev/null +++ b/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-25 01:20:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.318259385665529, + "acc_stderr": 0.013611993916971453, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407163 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37134037044413465, + "acc_stderr": 0.00482175773415672, + "acc_norm": 0.4766978689504083, + "acc_norm_stderr": 0.004984359669951927 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.4840357598978289, + "acc_stderr": 0.017870847506081734, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.017870847506081734 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368878, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368878 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.031709956060406545, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.031709956060406545 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.38263665594855306, + "acc_stderr": 0.027604689028581982, + "acc_norm": 0.38263665594855306, + "acc_norm_stderr": 0.027604689028581982 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.03318833286217281, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.03318833286217281 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792399, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792399 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3319327731092437, + "acc_stderr": 0.030588697013783663, + "acc_norm": 0.3319327731092437, + "acc_norm_stderr": 0.030588697013783663 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3487179487179487, + "acc_stderr": 0.02416278028401772, + "acc_norm": 0.3487179487179487, + "acc_norm_stderr": 0.02416278028401772 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5470085470085471, + "acc_stderr": 0.03261099873098619, + "acc_norm": 0.5470085470085471, + 
"acc_norm_stderr": 0.03261099873098619 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776292, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505415, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505415 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881564, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.02813325257881564 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159664, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948375, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948375 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.02723741509459247, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.02723741509459247 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39896373056994816, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.39896373056994816, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42201834862385323, + "acc_stderr": 0.02117499140776317, + "acc_norm": 0.42201834862385323, + "acc_norm_stderr": 0.02117499140776317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791438, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791438 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849725, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706207, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706207 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516992, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516992 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833586, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833586 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.02993534270787775, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.02993534270787775 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048224, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.015744027248256055, + "mc2": 0.48857937286231085, + "mc2_stderr": 0.015975836453033188 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3140495867768595, + "acc_stderr": 0.01595733243429507, + "acc_norm": 0.45336481700118064, + "acc_norm_stderr": 0.017115418225226865 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/zephykor-ko-beta-7b-chang", + "model_sha": "67d0bf6e69c6e705ca28b54349429ffc7f473b7a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/leeebs/kollama2_ndap/result_2023-12-07 05:20:29.json b/leeebs/kollama2_ndap/result_2023-12-07 05:20:29.json new file mode 100644 index 0000000000000000000000000000000000000000..bec6bd96f977597f4755abadf64ce6d36f1b30fa --- /dev/null +++ b/leeebs/kollama2_ndap/result_2023-12-07 05:20:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.3677474402730375, + "acc_norm_stderr": 0.014090995618168473 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38189603664608646, + "acc_stderr": 0.004848583243606688, + "acc_norm": 0.4862577175861382, + "acc_norm_stderr": 0.004987896411703674 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3090676883780332, + "acc_stderr": 0.01652498891970219, + "acc_norm": 0.3090676883780332, + "acc_norm_stderr": 0.01652498891970219 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893944, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893944 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533086, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533086 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342867, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342867 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + 
"acc_stderr": 0.03144712581678244, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678244 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764805, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764805 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.33760683760683763, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.33760683760683763, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443865, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443865 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.0430911870994646, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.0430911870994646 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823017, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823017 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845335, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.021855255263421806, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.021855255263421806 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.024748624490537368, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.024748624490537368 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26972477064220185, + 
"acc_stderr": 0.01902848671111544, + "acc_norm": 0.26972477064220185, + "acc_norm_stderr": 0.01902848671111544 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.017704531653250075, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.017704531653250075 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.025409301953225678, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.025409301953225678 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2620599739243807, + "acc_stderr": 0.011231552795890392, + "acc_norm": 0.2620599739243807, + "acc_norm_stderr": 0.011231552795890392 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967409, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967409 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 
0.01500067437357034, + "mc2": 0.39136302132648476, + "mc2_stderr": 0.01593302794463535 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2833530106257379, + "acc_stderr": 0.015492852084597239, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.016068253615813967 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leeebs/kollama2_ndap", + "model_sha": "a98a2530390c0402e33e35503b05249acf4ef790", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/leeebs/kollama2_ndap/result_2023-12-07 05:25:10.json b/leeebs/kollama2_ndap/result_2023-12-07 05:25:10.json new file mode 100644 index 0000000000000000000000000000000000000000..bec6bd96f977597f4755abadf64ce6d36f1b30fa --- /dev/null +++ 
b/leeebs/kollama2_ndap/result_2023-12-07 05:25:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.3677474402730375, + "acc_norm_stderr": 0.014090995618168473 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38189603664608646, + "acc_stderr": 0.004848583243606688, + "acc_norm": 0.4862577175861382, + "acc_norm_stderr": 0.004987896411703674 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3090676883780332, + "acc_stderr": 0.01652498891970219, + "acc_norm": 0.3090676883780332, + "acc_norm_stderr": 0.01652498891970219 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893944, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893944 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533086, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533086 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342867, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342867 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + 
"acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678244, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678244 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764805, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764805 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.33760683760683763, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.33760683760683763, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443865, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443865 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.0430911870994646, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.0430911870994646 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823017, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823017 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845335, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.021855255263421806, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.021855255263421806 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.024748624490537368, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.024748624490537368 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26972477064220185, + "acc_stderr": 0.01902848671111544, + "acc_norm": 0.26972477064220185, + "acc_norm_stderr": 0.01902848671111544 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.017704531653250075, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.017704531653250075 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.025409301953225678, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.025409301953225678 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2620599739243807, + "acc_stderr": 0.011231552795890392, + "acc_norm": 0.2620599739243807, + "acc_norm_stderr": 0.011231552795890392 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967409, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967409 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.01500067437357034, + "mc2": 0.39136302132648476, + "mc2_stderr": 0.01593302794463535 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2833530106257379, + "acc_stderr": 0.015492852084597239, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.016068253615813967 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leeebs/kollama2_ndap", + "model_sha": 
"a98a2530390c0402e33e35503b05249acf4ef790", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/leeebs/llama2-ndap-10-7b/result_2023-12-08 08:04:53.json b/leeebs/llama2-ndap-10-7b/result_2023-12-08 08:04:53.json new file mode 100644 index 0000000000000000000000000000000000000000..a98f61c2ca75fb06dc98ca7a31f04e1024fd64f6 --- /dev/null +++ b/leeebs/llama2-ndap-10-7b/result_2023-12-08 08:04:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32337883959044367, + "acc_stderr": 0.01366942163001212, + "acc_norm": 0.3575085324232082, + "acc_norm_stderr": 0.014005494275916571 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3776140211113324, + "acc_stderr": 0.004837995637638535, + "acc_norm": 0.47998406691894047, + "acc_norm_stderr": 0.0049857816204670205 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.31800766283524906, + "acc_stderr": 0.016653486275615394, + "acc_norm": 0.31800766283524906, + "acc_norm_stderr": 0.016653486275615394 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031023, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.026664410886937606, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.026664410886937606 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.03664666337225256, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.03664666337225256 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.026653531596715473, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.026653531596715473 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24615384615384617, + "acc_stderr": 0.021840866990423077, + "acc_norm": 0.24615384615384617, + "acc_norm_stderr": 0.021840866990423077 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642752, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.02468597928623997, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.02468597928623997 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.31196581196581197, + "acc_stderr": 0.03035152732334495, + "acc_norm": 0.31196581196581197, + "acc_norm_stderr": 0.03035152732334495 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708097, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708097 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02578787422095932, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02578787422095932 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.031871875379197986, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.031871875379197986 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.034624199316156234, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.034624199316156234 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886324, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.03646758875075566, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.03646758875075566 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117457, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117457 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.025058503316958174, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.025058503316958174 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.039849796533028704, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.039849796533028704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.018120224251484594, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484594 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266736, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266736 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.02541642838876748, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.02541642838876748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20955882352941177, + "acc_stderr": 0.024723110407677062, + "acc_norm": 0.20955882352941177, + "acc_norm_stderr": 0.024723110407677062 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.027833023871399683, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.027833023871399683 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.010946570966348783, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.010946570966348783 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.034531318018854146, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.034531318018854146 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23378212974296206, + "mc1_stderr": 0.014816195991931583, + "mc2": 0.3853810991215767, + "mc2_stderr": 0.015883616014330193 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30342384887839435, + "acc_stderr": 0.01580607271790957, + "acc_norm": 0.31641086186540734, + "acc_norm_stderr": 0.015989617951065474 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leeebs/llama2-ndap-10-7b", + "model_sha": "89e23643d1cdc9ece9f6c0a2b379dbadb67984f5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/leo911kim/Exodia-7B/result_2023-10-19 12:01:38.json b/leo911kim/Exodia-7B/result_2023-10-19 12:01:38.json new file mode 100644 index 0000000000000000000000000000000000000000..dba9d0dda4d029d863e7a26c19049a34e47a60df --- /dev/null +++ b/leo911kim/Exodia-7B/result_2023-10-19 12:01:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19027303754266212, + "acc_stderr": 0.011470424179225702, + "acc_norm": 0.2363481228668942, + "acc_norm_stderr": 0.012414960524301832 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2703644692292372, + "acc_stderr": 0.004432403734882273, + "acc_norm": 0.2969527982473611, + "acc_norm_stderr": 0.004559817589182076 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.03675668832233188, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.03675668832233188 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.01598281477469563, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.01598281477469563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785137, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785137 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370519, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370519 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.026003301117885142, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.026003301117885142 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.29596412556053814, + "acc_stderr": 0.030636591348699817, + "acc_norm": 0.29596412556053814, + "acc_norm_stderr": 0.030636591348699817 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728744, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728744 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.20202020202020202, + "acc_stderr": 0.028606204289229872, + "acc_norm": 0.20202020202020202, + "acc_norm_stderr": 0.028606204289229872 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135303, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135303 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.02907937453948001, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.02907937453948001 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2743589743589744, + "acc_stderr": 0.02262276576749322, + "acc_norm": 0.2743589743589744, + "acc_norm_stderr": 0.02262276576749322 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.031947400722655395, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.031947400722655395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.02499305339776482, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.02499305339776482 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32905982905982906, + "acc_stderr": 0.030782321577688163, + "acc_norm": 0.32905982905982906, + "acc_norm_stderr": 0.030782321577688163 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899105, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916718, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916718 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.17341040462427745, + "acc_stderr": 0.02886810787497064, + "acc_norm": 0.17341040462427745, + "acc_norm_stderr": 0.02886810787497064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2275132275132275, + "acc_stderr": 0.021591269407823774, + "acc_norm": 0.2275132275132275, + "acc_norm_stderr": 0.021591269407823774 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757177, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.024383665531035457, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.024383665531035457 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860695, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860695 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24954128440366974, + "acc_stderr": 0.018553897629501614, + "acc_norm": 0.24954128440366974, + "acc_norm_stderr": 0.018553897629501614 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.12698412698412698, + "acc_stderr": 0.029780417522688434, + "acc_norm": 0.12698412698412698, + "acc_norm_stderr": 0.029780417522688434 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.023929155517351294, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.023929155517351294 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032501, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032501 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2369281045751634, + "acc_stderr": 0.017201662169789796, + "acc_norm": 0.2369281045751634, + "acc_norm_stderr": 0.017201662169789796 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.03018753206032938, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.03018753206032938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142773, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142773 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24050632911392406, + "acc_stderr": 0.027820781981149685, + "acc_norm": 0.24050632911392406, + "acc_norm_stderr": 0.027820781981149685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.227509778357236, + "acc_stderr": 0.010707188576864226, + "acc_norm": 0.227509778357236, + "acc_norm_stderr": 0.010707188576864226 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501936, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501936 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237269, + "mc2": 0.42019223039185516, + "mc2_stderr": 0.01650268606738961 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21959858323494688, + "acc_stderr": 0.01423274308558026, + "acc_norm": 0.31641086186540734, + "acc_norm_stderr": 0.015989617951065474 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leo911kim/Exodia-7B", + "model_sha": "b3e1f98b934da7498bb18ce0cb9e0fc857593656", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/leo911kim/Exodia-kor-7B-v2/result_2023-10-19 16:35:19.json b/leo911kim/Exodia-kor-7B-v2/result_2023-10-19 16:35:19.json new file mode 100644 index 0000000000000000000000000000000000000000..6cc628a9b59c05cfe6139937019b895ee3fc125f --- /dev/null +++ b/leo911kim/Exodia-kor-7B-v2/result_2023-10-19 16:35:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2568259385665529, + "acc_stderr": 0.0127669237941168, + "acc_norm": 0.3122866894197952, + "acc_norm_stderr": 0.013542598541688065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.344353714399522, + "acc_stderr": 0.004741859753178411, + "acc_norm": 0.4522007568213503, + "acc_norm_stderr": 0.004966928094797574 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3524904214559387, + "acc_stderr": 0.017084150244081376, + "acc_norm": 0.3524904214559387, + "acc_norm_stderr": 0.017084150244081376 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.029379170464124818, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.029379170464124818 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.02731684767419272, + "acc_norm": 0.3633440514469453, + "acc_norm_stderr": 0.02731684767419272 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134987, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134987 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.023901157979402544, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.023901157979402544 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536821, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536821 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03010833071801162, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03010833071801162 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534323, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534323 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4700854700854701, + "acc_stderr": 0.03269741106812444, + "acc_norm": 0.4700854700854701, + "acc_norm_stderr": 0.03269741106812444 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946459, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946459 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02578787422095932, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02578787422095932 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4228855721393035, + "acc_stderr": 0.03493231777421282, + "acc_norm": 0.4228855721393035, + "acc_norm_stderr": 0.03493231777421282 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.034140140070440354, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.034140140070440354 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918424, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3487654320987654, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.3487654320987654, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + "acc_stderr": 0.03508984236295341, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295341 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3651376146788991, + "acc_stderr": 0.020642801454384005, + "acc_norm": 0.3651376146788991, + "acc_norm_stderr": 0.020642801454384005 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4380165289256198, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101366, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101366 + }, + "harness|ko_mmlu_machine_learning|5": { 
+ "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696042, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696042 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2245810055865922, + "acc_stderr": 0.01395680366654464, + "acc_norm": 0.2245810055865922, + "acc_norm_stderr": 0.01395680366654464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.030116426296540613, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.030116426296540613 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.288135593220339, + "acc_stderr": 0.011567140661324561, + "acc_norm": 0.288135593220339, + "acc_norm_stderr": 0.011567140661324561 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.01494881267906214, + "mc2": 0.3833926324458877, + "mc2_stderr": 0.015094351709331206 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.19952774498229045, + "acc_stderr": 0.01374009094762133, + "acc_norm": 0.3022432113341204, + "acc_norm_stderr": 0.01578865486302237 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leo911kim/Exodia-kor-7b-v2", + "model_sha": "f759698eb4ddc2b9afa9d234ee130e10ce92a61a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/leo911kim/Exodia-kor-7B-v2/result_2023-10-19 23:16:19.json b/leo911kim/Exodia-kor-7B-v2/result_2023-10-19 23:16:19.json new file mode 100644 index 0000000000000000000000000000000000000000..a5aeac4d4a8a5709c90c852e418008a82db4930c --- /dev/null +++ b/leo911kim/Exodia-kor-7B-v2/result_2023-10-19 23:16:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2568259385665529, + "acc_stderr": 0.0127669237941168, + "acc_norm": 0.3122866894197952, + "acc_norm_stderr": 0.013542598541688065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.344353714399522, + "acc_stderr": 0.004741859753178411, + "acc_norm": 0.4522007568213503, + "acc_norm_stderr": 0.004966928094797574 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3524904214559387, + "acc_stderr": 0.017084150244081376, + "acc_norm": 0.3524904214559387, + "acc_norm_stderr": 0.017084150244081376 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 
0.029379170464124818, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.029379170464124818 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.02731684767419272, + "acc_norm": 0.3633440514469453, + "acc_norm_stderr": 0.02731684767419272 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134987, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134987 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.023901157979402544, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.023901157979402544 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536821, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536821 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03010833071801162, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03010833071801162 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534323, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534323 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4700854700854701, + "acc_stderr": 0.03269741106812444, + "acc_norm": 0.4700854700854701, + "acc_norm_stderr": 0.03269741106812444 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946459, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946459 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02578787422095932, + 
"acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02578787422095932 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4228855721393035, + "acc_stderr": 0.03493231777421282, + "acc_norm": 0.4228855721393035, + "acc_norm_stderr": 0.03493231777421282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.034140140070440354, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.034140140070440354 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918424, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3487654320987654, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.3487654320987654, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + "acc_stderr": 0.03508984236295341, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295341 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3651376146788991, + "acc_stderr": 0.020642801454384005, + "acc_norm": 0.3651376146788991, + "acc_norm_stderr": 0.020642801454384005 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4380165289256198, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 
0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101366, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101366 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696042, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696042 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2245810055865922, + "acc_stderr": 0.01395680366654464, + "acc_norm": 0.2245810055865922, + "acc_norm_stderr": 0.01395680366654464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.030116426296540613, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.030116426296540613 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.288135593220339, + "acc_stderr": 0.011567140661324561, + "acc_norm": 0.288135593220339, + "acc_norm_stderr": 0.011567140661324561 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.01494881267906214, + "mc2": 0.3833926324458877, + "mc2_stderr": 0.015094351709331206 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.19952774498229045, + "acc_stderr": 0.01374009094762133, + "acc_norm": 0.3022432113341204, + "acc_norm_stderr": 0.01578865486302237 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leo911kim/Exodia-kor-7B-v2", + "model_sha": "f759698eb4ddc2b9afa9d234ee130e10ce92a61a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/leo911kim/Exodia-kor-7b/result_2023-10-19 10:10:30.json b/leo911kim/Exodia-kor-7b/result_2023-10-19 10:10:30.json new file mode 100644 index 0000000000000000000000000000000000000000..a40d86080c7cb1c68f7aa2219165356298c1b73c --- /dev/null +++ b/leo911kim/Exodia-kor-7b/result_2023-10-19 10:10:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2636518771331058, + "acc_stderr": 0.012875929151297066, + "acc_norm": 0.3370307167235495, + "acc_norm_stderr": 0.01381347665290227 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35331607249551883, + "acc_stderr": 0.004770229206838891, + "acc_norm": 0.4847639912368054, + "acc_norm_stderr": 0.004987464257999312 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.0398913985953177, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.0398913985953177 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26053639846743293, + "acc_stderr": 0.015696008563807096, + "acc_norm": 
0.26053639846743293, + "acc_norm_stderr": 0.015696008563807096 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.035478541985608236, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.035478541985608236 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.21221864951768488, + "acc_stderr": 0.023222756797435122, + "acc_norm": 0.21221864951768488, + "acc_norm_stderr": 0.023222756797435122 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134987, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134987 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.028869778460267042, + "acc_norm": 0.20707070707070707, + "acc_norm_stderr": 0.028869778460267042 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2689075630252101, + "acc_stderr": 0.028801392193631273, + "acc_norm": 0.2689075630252101, + "acc_norm_stderr": 0.028801392193631273 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.02102067268082791, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.02102067268082791 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030049, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.04524596007030049 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2161290322580645, + "acc_stderr": 0.023415293433568525, + "acc_norm": 0.2161290322580645, + "acc_norm_stderr": 0.023415293433568525 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.37606837606837606, + "acc_stderr": 0.031733936329694824, + "acc_norm": 0.37606837606837606, + "acc_norm_stderr": 
0.031733936329694824 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.024720713193952172, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.024720713193952172 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.0320384104021332, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.0320384104021332 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.0309528902177499, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.0309528902177499 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.023948512905468348, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.023948512905468348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25308641975308643, + "acc_stderr": 0.024191808600713, + "acc_norm": 0.25308641975308643, + "acc_norm_stderr": 0.024191808600713 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.029778663037752943, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.029778663037752943 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.018175110510343585, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.018175110510343585 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.02591780611714716, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 
0.02591780611714716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.017952449196987866, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.017952449196987866 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872405, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.17592592592592593, + "acc_stderr": 0.025967420958258526, + "acc_norm": 0.17592592592592593, + "acc_norm_stderr": 0.025967420958258526 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.024562204314142314, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.024562204314142314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174913, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.030274974880218974, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.030274974880218974 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2588005215123859, + "acc_stderr": 0.01118610904656461, + "acc_norm": 0.2588005215123859, + "acc_norm_stderr": 0.01118610904656461 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.032876667586034886, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.032876667586034886 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.01510240479735965, + "mc2": 0.39305582191498534, + "mc2_stderr": 0.015037592654153921 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21133412042502953, + "acc_stderr": 0.014036090342930314, + "acc_norm": 0.3010625737898465, + "acc_norm_stderr": 0.015771113299945457 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leo911kim/Exodia-kor-7b", + "model_sha": "dfb83ef9894aadda3301f98602d4d45cfd19c192", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lifelongeek/ko-7b-ins/result_2023-10-13 13:58:31.json b/lifelongeek/ko-7b-ins/result_2023-10-13 13:58:31.json new file mode 100644 index 0000000000000000000000000000000000000000..e8dd984ed86a1800b1746ee48283426459a63d3c --- /dev/null +++ b/lifelongeek/ko-7b-ins/result_2023-10-13 13:58:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20051194539249148, + "acc_stderr": 0.01170031805049937, + "acc_norm": 0.2363481228668942, + "acc_norm_stderr": 0.012414960524301811 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2500497908783111, + "acc_stderr": 0.004321564303822431, + "acc_norm": 0.246265684126668, + "acc_norm_stderr": 
0.004299546103761434 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + 
}, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.2839657282741738, + "mc1_stderr": 0.015785370858396736, + "mc2": 0.4901664286815018, + "mc2_stderr": 0.016461517029586932 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.0743801652892562, + "acc_stderr": 0.009021104510906089, + "acc_norm": 0.3152302243211334, + "acc_norm_stderr": 0.015973534923794486 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lifelongeek/ko-7b-ins", + "model_sha": "4970a8c78104fed617103be2763fb54e8e90ca72", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lmsys/vicuna-13b-v1.5/result_2023-10-29 08:20:20.json b/lmsys/vicuna-13b-v1.5/result_2023-10-29 08:20:20.json new file mode 100644 index 
0000000000000000000000000000000000000000..29017402e327122642a67d67dcb83284fdc08833 --- /dev/null +++ b/lmsys/vicuna-13b-v1.5/result_2023-10-29 08:20:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3199658703071672, + "acc_stderr": 0.013631345807016195, + "acc_norm": 0.36177474402730375, + "acc_norm_stderr": 0.014041957945038078 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3619796853216491, + "acc_stderr": 0.004795908282584544, + "acc_norm": 0.45180242979486157, + "acc_norm_stderr": 0.004966544724452228 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + "acc_stderr": 0.017841995750520857, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.017841995750520857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4115755627009646, + "acc_stderr": 0.02795048149440127, + "acc_norm": 0.4115755627009646, + "acc_norm_stderr": 0.02795048149440127 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.024503472557110946, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.024503472557110946 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776296, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587192, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587192 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523864, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.404320987654321, + "acc_stderr": 0.027306625297327698, + "acc_norm": 0.404320987654321, + "acc_norm_stderr": 0.027306625297327698 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + 
"acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.037124548537213684, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.037124548537213684 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41284403669724773, + "acc_stderr": 0.021109128133413906, + "acc_norm": 0.41284403669724773, + "acc_norm_stderr": 0.021109128133413906 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.018999707383162662, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.018999707383162662 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.02872386385328128, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.02872386385328128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.040598672469526864, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.040598672469526864 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005337, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005337 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210749, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210749 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.027365861131513805, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.027365861131513805 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.01198993664066653, + 
"acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.01198993664066653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155072, + "mc2": 0.4781712790136037, + "mc2_stderr": 0.015927322204823676 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3152302243211334, + "acc_stderr": 0.01597353492379446, + "acc_norm": 0.3565525383707202, + "acc_norm_stderr": 0.01646770698152745 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "lmsys/vicuna-13b-v1.5", + "model_sha": "3deb0106f72a3a433f0c6ea0cb978bdf14bcd3a6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/madatnlp/marcoroni-7b-v3-safetensor/result_2023-12-12 05:14:51.json b/madatnlp/marcoroni-7b-v3-safetensor/result_2023-12-12 05:14:51.json new file mode 100644 index 0000000000000000000000000000000000000000..2a53e4601c33dc4f2c9c495d94ed1d9762823816 --- /dev/null +++ b/madatnlp/marcoroni-7b-v3-safetensor/result_2023-12-12 05:14:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3643344709897611, + "acc_stderr": 0.014063260279882417, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39245170284803826, + "acc_stderr": 0.004872984492967996, + "acc_norm": 0.5104560844453296, + "acc_norm_stderr": 0.004988690229505665 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107675, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107675 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4763729246487867, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.4763729246487867, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.03515520728670417, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.03515520728670417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643945, + "acc_norm": 0.24509803921568626, + 
"acc_norm_stderr": 0.042801058373643945 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933914, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933914 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417604, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857416, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857416 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159788, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022895 + 
}, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833946, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833946 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.03606065001832917, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.03606065001832917 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5119266055045871, + "acc_stderr": 0.021431223617362233, + "acc_norm": 0.5119266055045871, + "acc_norm_stderr": 0.021431223617362233 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475358, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475358 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983576, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983576 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.012106817203067208, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.012106817203067208 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3561811505507956, + "mc1_stderr": 0.016763790728446342, + "mc2": 0.5364735673869772, + "mc2_stderr": 0.015999759828332336 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4014167650531287, + "acc_stderr": 0.01685290785872906, + "acc_norm": 0.41440377804014167, + "acc_norm_stderr": 0.016936583383943615 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "madatnlp/marcoroni-7b-v3-safetensor", + "model_sha": "20702b50c9eee355bfae17aab64276e2c8da420f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/madatnlp/mist-enko-lora-2950/result_2023-12-17 00:06:10.json b/madatnlp/mist-enko-lora-2950/result_2023-12-17 00:06:10.json new file mode 100644 index 0000000000000000000000000000000000000000..917e5df86b93b429fe91efa2290061f5e79f94f5 --- /dev/null +++ b/madatnlp/mist-enko-lora-2950/result_2023-12-17 00:06:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179352, + "acc_norm": 0.39505119453924914, + "acc_norm_stderr": 0.014285898292938175 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37651862178848833, + "acc_stderr": 0.0048352227940065195, + "acc_norm": 0.4856602270464051, + "acc_norm_stderr": 0.004987728900897595 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4904214559386973, + "acc_stderr": 0.01787668227534088, + "acc_norm": 0.4904214559386973, + "acc_norm_stderr": 0.01787668227534088 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085335, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085335 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.0416656757710158, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.0416656757710158 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106522, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431194, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159788, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 
0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.02677299065336183, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.02677299065336183 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362227, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362227 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271758, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271758 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596143, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596143 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553988, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553988 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02952009569768776, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02952009569768776 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.47030792501762314, + "mc2_stderr": 0.01548100060962531 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46162927981109797, + "acc_stderr": 0.01713966022184556, + "acc_norm": 0.5159386068476978, + "acc_norm_stderr": 0.017181617837190195 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "madatnlp/mist-enko-lora-2950", + "model_sha": "5f993597257141d297766ddc3578576a236cdd43", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maum-ai/llamaum-13b-chat-qlora-s/result_2023-10-01 03:59:55.json b/maum-ai/llamaum-13b-chat-qlora-s/result_2023-10-01 03:59:55.json new file mode 100644 index 0000000000000000000000000000000000000000..fc3c6df5eccc35bae19c959041ded34411a46491 --- /dev/null +++ b/maum-ai/llamaum-13b-chat-qlora-s/result_2023-10-01 03:59:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3370307167235495, + "acc_stderr": 0.013813476652902279, + "acc_norm": 0.39419795221843, + "acc_norm_stderr": 0.014280522667467325 + }, + "harness|ko_hellaswag|10": { + "acc": 0.364070902210715, + "acc_stderr": 0.004801852881329742, + "acc_norm": 0.462158932483569, + "acc_norm_stderr": 0.004975470690867166 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.017852981266633955, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.017852981266633955 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 
0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413925, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413925 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849727, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849727 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.030242233800854498, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.030242233800854498 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815642, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815642 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 
0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307702, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307702 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43853211009174314, + "acc_stderr": 0.021274713073954562, + "acc_norm": 0.43853211009174314, + "acc_norm_stderr": 0.021274713073954562 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3545751633986928, + "acc_stderr": 0.019353360547553697, + "acc_norm": 0.3545751633986928, + "acc_norm_stderr": 0.019353360547553697 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152567, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152567 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3014705882352941, + "acc_stderr": 0.027875982114273168, + "acc_norm": 0.3014705882352941, + "acc_norm_stderr": 0.027875982114273168 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585899, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585899 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03471157907953426, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03471157907953426 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.4146930075606435, + "mc2_stderr": 0.015301613292343582 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4037780401416765, + "acc_stderr": 0.01686903154029863, + "acc_norm": 0.4557260920897285, + "acc_norm_stderr": 0.017122829143292644 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maum-ai/llamaum-13b-chat-qlora-s", + "model_sha": "209891592ed47343e7654b1b7fdc1a514089df3b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maum-ai/llamaum-13b-instruct-s/result_2023-10-11 14:37:45.json b/maum-ai/llamaum-13b-instruct-s/result_2023-10-11 14:37:45.json new file mode 100644 index 0000000000000000000000000000000000000000..9d744386ad10c2beaefa77c6bb7d36544c0f542d --- /dev/null +++ b/maum-ai/llamaum-13b-instruct-s/result_2023-10-11 14:37:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28668941979522183, + "acc_stderr": 0.013214986329274762, + "acc_norm": 0.35665529010238906, + "acc_norm_stderr": 0.013998056902620196 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35570603465445133, + "acc_stderr": 0.00477748315963403, + "acc_norm": 0.4393547102170882, + "acc_norm_stderr": 0.004952942072999276 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066165, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066165 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.042450224863844956, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.042450224863844956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3652618135376756, + "acc_stderr": 0.01721853002883864, + "acc_norm": 0.3652618135376756, + "acc_norm_stderr": 0.01721853002883864 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.029513196625539345, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.029513196625539345 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.23493975903614459, + "acc_stderr": 0.03300533186128922, + "acc_norm": 0.23493975903614459, + "acc_norm_stderr": 0.03300533186128922 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893947, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893947 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.32286995515695066, + "acc_stderr": 0.031381476375754995, + "acc_norm": 0.32286995515695066, + "acc_norm_stderr": 0.031381476375754995 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728743, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728743 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.032087795587867514, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.032087795587867514 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.03831226048850333, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.03831226048850333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380565, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380565 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.02528441611490016, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.02528441611490016 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.032485775115783995, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.032485775115783995 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118345, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118345 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302506, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302506 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804725, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804725 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047875, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047875 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.03680350371286462, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.03680350371286462 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.02610567386140981, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.02610567386140981 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26788990825688075, + "acc_stderr": 0.018987462257978652, + "acc_norm": 0.26788990825688075, + "acc_norm_stderr": 0.018987462257978652 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.034550710191021496, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.034550710191021496 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.026256053835718968, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.026256053835718968 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.018311653053648222, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.018311653053648222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993666, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993666 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.024231013370541097, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.024231013370541097 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.02737294220178816, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.02737294220178816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27249022164276404, + "acc_stderr": 0.01137165829431153, + "acc_norm": 0.27249022164276404, + "acc_norm_stderr": 0.01137165829431153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03308611113236436, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03308611113236436 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.4469469691662156, + "mc2_stderr": 0.015668694918169947 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091115, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191392 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maum-ai/llamaum-13b-instruct-s", + "model_sha": "d9a9f9c019908c2d302da856473891095ad81940", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maum-ai/llamaum-13b-instruct-v1/result_2023-10-11 06:55:52.json b/maum-ai/llamaum-13b-instruct-v1/result_2023-10-11 06:55:52.json new file mode 100644 index 0000000000000000000000000000000000000000..ea8863aa26057faf61ae8e47d57a3c9e067aa047 --- /dev/null +++ b/maum-ai/llamaum-13b-instruct-v1/result_2023-10-11 06:55:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180639, + "acc_norm": 0.45819112627986347, + "acc_norm_stderr": 0.014560220308714702 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41047600079665403, + "acc_stderr": 0.004909148239488287, + "acc_norm": 0.5376419040031866, + "acc_norm_stderr": 0.004975621147406092 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5006385696040868, + "acc_stderr": 0.01787994891443169, + "acc_norm": 0.5006385696040868, + "acc_norm_stderr": 0.01787994891443169 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562786, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562786 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.0397923663749741, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.0397923663749741 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102315, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + 
"acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557845, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.030052580579557845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696545, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696545 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523867, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523867 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03942082639927214, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03942082639927214 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + "acc_stderr": 
0.027780141207023344, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.027780141207023344 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236397, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025425, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025425 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681407, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681407 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330373, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330373 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.4426389060165117, + "mc2_stderr": 0.015221328776941925 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33412042502951594, + "acc_stderr": 0.01621676330423968, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.01701984753597221 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maum-ai/llamaum-13b-instruct-v1", + "model_sha": "10d1ae8e0155ba956a1e4cb16dd3b35415dea098", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Mistral-ko-7B-v0.1/result_2023-11-26 09:26:16.json b/maywell/Mistral-ko-7B-v0.1/result_2023-11-26 09:26:16.json new file mode 100644 index 0000000000000000000000000000000000000000..2f57ed5d77b252b4e2c9fbfba738d40dead03f88 --- /dev/null +++ b/maywell/Mistral-ko-7B-v0.1/result_2023-11-26 09:26:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27047781569965873, + "acc_stderr": 0.012980954547659556, + "acc_norm": 0.31143344709897613, + "acc_norm_stderr": 0.013532472099850947 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28759211312487554, + "acc_stderr": 
0.004517148434180507, + "acc_norm": 0.31428002389962156, + "acc_norm_stderr": 0.00463279737528977 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041694, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041694 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.33077905491698595, + "acc_stderr": 0.016824818462563746, + "acc_norm": 0.33077905491698595, + "acc_norm_stderr": 0.016824818462563746 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.036643147772880864, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.036643147772880864 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3247588424437299, + "acc_stderr": 0.026596782287697043, + "acc_norm": 0.3247588424437299, + "acc_norm_stderr": 0.026596782287697043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.273542600896861, + "acc_stderr": 0.02991858670779883, + "acc_norm": 0.273542600896861, + "acc_norm_stderr": 0.02991858670779883 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300992, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300992 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.040824829046386284, + "acc_norm": 0.4, + "acc_norm_stderr": 0.040824829046386284 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.03086868260412163, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.03086868260412163 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30256410256410254, + "acc_stderr": 0.023290888053772725, + "acc_norm": 0.30256410256410254, + "acc_norm_stderr": 0.023290888053772725 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.03282649385304151, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.03282649385304151 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.35161290322580646, + "acc_stderr": 0.02716253782694846, + "acc_norm": 0.35161290322580646, + "acc_norm_stderr": 0.02716253782694846 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4658119658119658, + "acc_stderr": 0.03267942734081227, + "acc_norm": 0.4658119658119658, + "acc_norm_stderr": 0.03267942734081227 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30566037735849055, + "acc_stderr": 0.028353298073322666, + "acc_norm": 0.30566037735849055, + "acc_norm_stderr": 0.028353298073322666 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425464, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425464 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3283582089552239, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.3283582089552239, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.0240268463928735, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.0240268463928735 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3439306358381503, + "acc_stderr": 0.025574123786546648, + "acc_norm": 0.3439306358381503, + "acc_norm_stderr": 0.025574123786546648 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625658, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625658 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28256880733944956, + "acc_stderr": 0.019304243497707152, + "acc_norm": 0.28256880733944956, + "acc_norm_stderr": 0.019304243497707152 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.02768418188330288, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.02768418188330288 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4793388429752066, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.038234289699266046, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.038234289699266046 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.01855063450295296, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.01855063450295296 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534792, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534792 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915185, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915185 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.03154696285656629, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.03154696285656629 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.030781549102026223, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.030781549102026223 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2926988265971317, + "acc_stderr": 0.011620949195849536, + "acc_norm": 0.2926988265971317, + "acc_norm_stderr": 0.011620949195849536 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.03270287181482079, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.03270287181482079 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520702, + "mc2": 0.4233292952140553, + "mc2_stderr": 0.015664004103265215 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.19008264462809918, + "acc_stderr": 0.013489827742736773, + "acc_norm": 0.30814639905548996, + "acc_norm_stderr": 0.01587451515629839 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Mistral-ko-7B-v0.1", + "model_sha": "01bdf68f5185b57eac642128c0940bf926c4d473", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/PiVoT-0.1-early/result_2023-11-24 07:54:04.json b/maywell/PiVoT-0.1-early/result_2023-11-24 07:54:04.json new file mode 100644 index 
0000000000000000000000000000000000000000..b6099da4b2811a1dad5f47349eadcd8bb1c66c3a --- /dev/null +++ b/maywell/PiVoT-0.1-early/result_2023-11-24 07:54:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4206484641638225, + "acc_stderr": 0.01442621125250841, + "acc_norm": 0.47525597269624575, + "acc_norm_stderr": 0.014593487694937738 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41286596295558653, + "acc_stderr": 0.004913429010559069, + "acc_norm": 0.538338976299542, + "acc_norm_stderr": 0.004975091055697189 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5747126436781609, + "acc_stderr": 0.01767922548943146, + "acc_norm": 0.5747126436781609, + "acc_norm_stderr": 0.01767922548943146 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.031709956060406545, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.031709956060406545 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.038194861407583984, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.038194861407583984 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232962, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232962 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.02530295889085015, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.02530295889085015 + }, + "harness|ko_mmlu_computer_security|5": 
{ + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4967741935483871, + "acc_stderr": 0.02844341422643833, + "acc_norm": 0.4967741935483871, + "acc_norm_stderr": 0.02844341422643833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541198, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541198 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851316, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851316 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066492, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.025331202438944433, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 0.025331202438944433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + 
"acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5486238532110091, + "acc_stderr": 0.021335714711268786, + "acc_norm": 0.5486238532110091, + "acc_norm_stderr": 0.021335714711268786 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.01979448890002412, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.01979448890002412 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176853, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176853 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20446927374301677, + "acc_stderr": 0.013488813404711914, + "acc_norm": 0.20446927374301677, + "acc_norm_stderr": 0.013488813404711914 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.03186785930004128, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.03186785930004128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3396349413298566, + "acc_stderr": 0.012095592506931973, + "acc_norm": 
0.3396349413298566, + "acc_norm_stderr": 0.012095592506931973 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33659730722154224, + "mc1_stderr": 0.01654241280949487, + "mc2": 0.5139701018144873, + "mc2_stderr": 0.01637268513678025 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40968122786304606, + "acc_stderr": 0.016907568192219478, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.01701984753597221 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": 
"maywell/PiVoT-0.1-early", + "model_sha": "6eeae58a1a292a1d7f989952a07aead6d5da3c69", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/PiVoT-10.7B-Mistral-v0.2/result_2023-12-16 03:28:52.json b/maywell/PiVoT-10.7B-Mistral-v0.2/result_2023-12-16 03:28:52.json new file mode 100644 index 0000000000000000000000000000000000000000..79e89916793488609af0b7b2600fc178934d5c4f --- /dev/null +++ b/maywell/PiVoT-10.7B-Mistral-v0.2/result_2023-12-16 03:28:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3651877133105802, + "acc_stderr": 0.0140702655192688, + "acc_norm": 0.4232081911262799, + "acc_norm_stderr": 0.014438036220848022 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3717386974706234, + "acc_stderr": 0.004822814501358897, + "acc_norm": 0.474407488548098, + "acc_norm_stderr": 0.0049832407441013785 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.037792759455032014, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.037792759455032014 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4776500638569604, + "acc_stderr": 0.01786209177850787, + "acc_norm": 0.4776500638569604, + "acc_norm_stderr": 0.01786209177850787 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357766, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357766 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.036807836907275814, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.036807836907275814 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.03289477330098615, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.03289477330098615 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + 
"acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182087, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182087 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938145, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938145 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.035333892347392454, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.035333892347392454 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699954, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699954 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142261, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142261 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33641975308641975, + "acc_stderr": 0.026289734945952926, + "acc_norm": 0.33641975308641975, + "acc_norm_stderr": 0.026289734945952926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.021410999753635914, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.021410999753635914 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.01939305840235544, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.01939305840235544 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639896, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639896 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19776536312849163, + "acc_stderr": 0.013321620594050947, + "acc_norm": 0.19776536312849163, + "acc_norm_stderr": 0.013321620594050947 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.02850145286039656, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.02850145286039656 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.42616033755274263, + "acc_stderr": 0.032190357031317736, + "acc_norm": 0.42616033755274263, + "acc_norm_stderr": 0.032190357031317736 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.012002091666902312, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.012002091666902312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.48328951508321544, + "mc2_stderr": 0.015862522599324993 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4025974025974026, + "acc_stderr": 0.016861020486407786, + "acc_norm": 0.4167650531286895, + "acc_norm_stderr": 0.016950489146108833 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/PiVoT-10.7B-Mistral-v0.2", + "model_sha": "a496457d0743b6030ffbb96dad2dc6a62d143943", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-10.7B-v0.4/result_2023-12-27 12:59:51.json b/maywell/Synatra-10.7B-v0.4/result_2023-12-27 12:59:51.json new file mode 100644 index 0000000000000000000000000000000000000000..f940c28c4c558331bf47962678366e92b9132caa --- /dev/null +++ b/maywell/Synatra-10.7B-v0.4/result_2023-12-27 12:59:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46501706484641636, + "acc_stderr": 0.014575583922019667, + "acc_norm": 0.507679180887372, + "acc_norm_stderr": 0.014609667440892574 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44592710615415254, + "acc_stderr": 0.004960516570284905, + "acc_norm": 0.6014738099980084, + "acc_norm_stderr": 0.004885942040894561 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6513409961685823, + "acc_stderr": 0.017041243143490977, + "acc_norm": 0.6513409961685823, + "acc_norm_stderr": 0.017041243143490977 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5819935691318328, + "acc_stderr": 0.028013651891995072, + "acc_norm": 0.5819935691318328, + "acc_norm_stderr": 0.028013651891995072 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.043285772152629735, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.043285772152629735 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6616161616161617, + "acc_stderr": 0.033711241426263035, + "acc_norm": 0.6616161616161617, + "acc_norm_stderr": 0.033711241426263035 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.02528558599001783, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.02528558599001783 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881564, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.02813325257881564 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.037786210790920545, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.037786210790920545 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752045, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.041406856391115014, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.041406856391115014 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756643, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756643 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5524691358024691, + "acc_stderr": 0.027667138569422697, + "acc_norm": 0.5524691358024691, + "acc_norm_stderr": 0.027667138569422697 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070435, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070435 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852387, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852387 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.020206653187884786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.020206653187884786 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997865, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997865 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.16871508379888267, + "acc_stderr": 0.012525156087191954, + "acc_norm": 0.16871508379888267, + "acc_norm_stderr": 0.012525156087191954 + }, + "harness|ko_mmlu_college_computer_science|5": { + 
"acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.03160106993449601, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.03160106993449601 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955934, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.012319403369564642, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564642 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.0341078533890472, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.0341078533890472 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950234, + "mc2": 0.4537433695691716, + "mc2_stderr": 0.01529287884999072 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.017185069732676524, + "acc_norm": 0.5407319952774499, + "acc_norm_stderr": 0.01713321827653767 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-10.7B-v0.4", + "model_sha": "a311ddd48d56f9451c96f88e8a79fad6faba476f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-11B-Tb2M_SM/result_2023-10-16 01:20:57.json b/maywell/Synatra-11B-Tb2M_SM/result_2023-10-16 01:20:57.json new file mode 100644 index 0000000000000000000000000000000000000000..f1ec3e80bdee5ac807137a77888bda3a5b656998 --- /dev/null +++ b/maywell/Synatra-11B-Tb2M_SM/result_2023-10-16 01:20:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1766211604095563, + "acc_stderr": 0.011144042769316503, + "acc_norm": 0.24146757679180889, + "acc_norm_stderr": 0.012506564839739432 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2528380800637323, + "acc_stderr": 0.004337506344899919, + "acc_norm": 0.24965146385182235, + "acc_norm_stderr": 0.004319267432460665 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + 
"acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02860595370200424, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + 
"acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803627, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803627 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.031546980450822305, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.031546980450822305 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612378984, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612378984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 
0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570345, + "mc2": 0.4752303618111022, + "mc2_stderr": 0.01719345285029173 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09327036599763873, + "acc_stderr": 0.009998286190276725, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.01663791778979874 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-11B-Tb2M_SM", + "model_sha": "7f2867881e6ebd2f1383a3d0be8b5573dd4897ad", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-11B-Testbench-2/result_2023-10-16 00:23:46.json b/maywell/Synatra-11B-Testbench-2/result_2023-10-16 00:23:46.json new file mode 100644 index 0000000000000000000000000000000000000000..cd083d19189a415db51a90215fdf9142963dcb0b --- /dev/null +++ b/maywell/Synatra-11B-Testbench-2/result_2023-10-16 00:23:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145685, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.01442218122630302 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37731527584146585, + "acc_stderr": 0.00483724201519111, + "acc_norm": 0.48775144393547104, + "acc_norm_stderr": 0.0049882839816310495 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49680715197956576, + "acc_stderr": 0.01787959894593307, + "acc_norm": 0.49680715197956576, + "acc_norm_stderr": 0.01787959894593307 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534443, + "acc_norm": 0.3404255319148936, + 
"acc_norm_stderr": 0.030976692998534443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010591, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.02832032583010591 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330314, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330314 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 
0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518028, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518028 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307688, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307688 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.038367409078310294, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.038367409078310294 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.404320987654321, + "acc_stderr": 0.027306625297327684, + "acc_norm": 0.404320987654321, + "acc_norm_stderr": 0.027306625297327684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + "acc_stderr": 0.021432956203453316, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.021432956203453316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949097, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949097 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.35784313725490197, + "acc_stderr": 0.019393058402355442, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.046840993210771065, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.046840993210771065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786154, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786154 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2985658409387223, + "acc_stderr": 0.011688060141794224, + "acc_norm": 0.2985658409387223, + "acc_norm_stderr": 0.011688060141794224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842881, + "mc2": 0.4342691202696536, + "mc2_stderr": 0.015037727340783071 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3860684769775679, + "acc_stderr": 0.016738130760321743, + "acc_norm": 0.4510035419126328, + "acc_norm_stderr": 0.017107618859549357 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-11B-Testbench-2", + "model_sha": "50c90dfe257d5c5ad4c3c6a1fb29f6a5066c085a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-11B-Testbench/result_2023-10-15 12:35:17.json b/maywell/Synatra-11B-Testbench/result_2023-10-15 12:35:17.json new file mode 100644 index 0000000000000000000000000000000000000000..018fafb18aca26e5fe66cebaf5f1462ce779a38e --- /dev/null +++ b/maywell/Synatra-11B-Testbench/result_2023-10-15 12:35:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3378839590443686, + "acc_stderr": 0.013822047922283509, + "acc_norm": 0.3856655290102389, + "acc_norm_stderr": 0.014224250973257177 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37024497112129057, + "acc_stderr": 0.004818833521340358, + "acc_norm": 0.4742083250348536, + "acc_norm_stderr": 0.00498313847960438 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5019157088122606, + "acc_stderr": 0.017879832259026677, + "acc_norm": 0.5019157088122606, + "acc_norm_stderr": 
0.017879832259026677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.03088161852067694, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.03088161852067694 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665232, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665232 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + 
"acc_stderr": 0.03070948699255655, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.03070948699255655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.027339546640662727, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.027339546640662727 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995093, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995093 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + 
"acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779207, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779207 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.01924978569171721, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.01924978569171721 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650154, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650154 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.01493131670322051, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.01493131670322051 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29986962190352023, + "acc_stderr": 0.011702660860193986, + "acc_norm": 0.29986962190352023, + "acc_norm_stderr": 0.011702660860193986 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.034542365853806094, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.034542365853806094 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326902, + "mc2": 0.4475458217061865, + "mc2_stderr": 0.015253457911461817 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38961038961038963, + "acc_stderr": 0.0167661616718935, + "acc_norm": 0.4628099173553719, + "acc_norm_stderr": 0.017142736117643297 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-11B-Testbench", + "model_sha": "9399ea6c2a1d955e31d6b4d68b2b86115aea0e59", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-42dot-1.3B/result_2023-12-01 02:14:27.json b/maywell/Synatra-42dot-1.3B/result_2023-12-01 02:14:27.json new file mode 100644 index 0000000000000000000000000000000000000000..37144160eff5f55822e98d6be366492f712cf6a3 --- /dev/null +++ b/maywell/Synatra-42dot-1.3B/result_2023-12-01 02:14:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26706484641638223, + "acc_stderr": 0.012928933196496342, + "acc_norm": 0.34897610921501704, + "acc_norm_stderr": 0.0139289334613825 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3573989245170285, + "acc_stderr": 0.004782542754102088, + "acc_norm": 0.45439155546703847, + "acc_norm_stderr": 0.004968979259738335 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21455938697318008, + "acc_stderr": 0.014680033956893346, + "acc_norm": 0.21455938697318008, + "acc_norm_stderr": 0.014680033956893346 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617721, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617721 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.02964400657700962, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.02964400657700962 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2508038585209003, + "acc_stderr": 0.024619771956697165, + "acc_norm": 0.2508038585209003, + "acc_norm_stderr": 0.024619771956697165 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.02960510321703835, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.02960510321703835 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.02835962087053395, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.02835962087053395 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132368, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132368 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733552, + 
"acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733552 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885193, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891155, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891155 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2037735849056604, + "acc_stderr": 0.02479078450177541, + "acc_norm": 0.2037735849056604, + "acc_norm_stderr": 0.02479078450177541 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275805, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275805 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.030965903123573033, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.030965903123573033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.030299574664788137, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.030299574664788137 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.021679219663693152, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.021679219663693152 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071134, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071134 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02492200116888633, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02492200116888633 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178267, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178267 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.2036697247706422, + "acc_stderr": 0.017266742087630797, 
+ "acc_norm": 0.2036697247706422, + "acc_norm_stderr": 0.017266742087630797 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21241830065359477, + "acc_stderr": 0.02342037547829613, + "acc_norm": 0.21241830065359477, + "acc_norm_stderr": 0.02342037547829613 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032501, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032501 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2434640522875817, + "acc_stderr": 0.017362473762146623, + "acc_norm": 0.2434640522875817, + "acc_norm_stderr": 0.017362473762146623 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642962, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697626, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697626 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.026537045312145287, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.026537045312145287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.029696338713422882, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.029696338713422882 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178475, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 
0.4294408765617315, + "mc2_stderr": 0.015039627065597595 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791255, + "acc_norm": 0.4002361275088548, + "acc_norm_stderr": 0.016844693510505056 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-42dot-1.3B", + "model_sha": "8342dd3132ec87f12a229f83828f55bfcc0e5814", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-Instruct-v0.2/result_2023-10-12 03:42:18.json b/maywell/Synatra-7B-Instruct-v0.2/result_2023-10-12 03:42:18.json new file mode 100644 index 0000000000000000000000000000000000000000..467327f67fbcac98194815a0ac75d8ade72fbdc8 --- /dev/null +++ 
b/maywell/Synatra-7B-Instruct-v0.2/result_2023-10-12 03:42:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955003, + "acc_norm": 0.4180887372013652, + "acc_norm_stderr": 0.014413988396996084 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38149770961959767, + "acc_stderr": 0.00484761521647345, + "acc_norm": 0.49352718581955785, + "acc_norm_stderr": 0.004989363276955168 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5070242656449553, + "acc_stderr": 0.017878199003432214, + "acc_norm": 0.5070242656449553, + "acc_norm_stderr": 0.017878199003432214 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.03318833286217281, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.03318833286217281 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643945, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643945 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.02524277098712617, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.02524277098712617 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, 
+ "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887468, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887468 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028414, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028414 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.042663394431593955, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.042663394431593955 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.021432956203453316, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.021432956203453316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094593, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094593 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3139664804469274, + "acc_stderr": 0.015521923933523635, + "acc_norm": 0.3139664804469274, + "acc_norm_stderr": 0.015521923933523635 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.03000856284500348, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.03000856284500348 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.01175993961808546, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 
0.01175993961808546 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4577444189927008, + "mc2_stderr": 0.015214396697030213 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3754427390791027, + "acc_stderr": 0.016648411589511095, + "acc_norm": 0.43919716646989376, + "acc_norm_stderr": 0.0170627757447807 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-Instruct-v0.2", + "model_sha": 
"5ca980b650d75e7611bcb9299948bd86dd7bc381", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-Instruct-v0.3-pre/result_2023-10-28 02:31:59.json b/maywell/Synatra-7B-Instruct-v0.3-pre/result_2023-10-28 02:31:59.json new file mode 100644 index 0000000000000000000000000000000000000000..f04d67fe768ba451086fe5aef8887fc8df3d97b6 --- /dev/null +++ b/maywell/Synatra-7B-Instruct-v0.3-pre/result_2023-10-28 02:31:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40955631399317405, + "acc_stderr": 0.014370358632472447, + "acc_norm": 0.4726962457337884, + "acc_norm_stderr": 0.014589589101986 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4041027683728341, + "acc_stderr": 0.004897146690596247, + "acc_norm": 0.525592511451902, + "acc_norm_stderr": 0.004983240744101376 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041696, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5721583652618135, + "acc_stderr": 0.017692787927803724, + "acc_norm": 0.5721583652618135, + "acc_norm_stderr": 0.017692787927803724 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004257, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004257 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 0.02525303255499768, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.02525303255499768 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.026772990653361816, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.026772990653361816 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + 
"acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5669724770642202, + "acc_stderr": 0.021244146569074345, + "acc_norm": 0.5669724770642202, + "acc_norm_stderr": 0.021244146569074345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42320261437908496, + "acc_stderr": 0.01998780976948206, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.01998780976948206 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963768, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963768 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20558659217877095, + "acc_stderr": 0.013516116210724202, + "acc_norm": 0.20558659217877095, + "acc_norm_stderr": 0.013516116210724202 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687579, + "acc_norm": 
0.5510204081632653, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6962025316455697, + "acc_stderr": 0.029936696387138625, + "acc_norm": 0.6962025316455697, + "acc_norm_stderr": 0.029936696387138625 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741523, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741523 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630573, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.01627228795791694, + "mc2": 0.47669627022567646, + "mc2_stderr": 0.015363718738683547 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4557260920897285, + "acc_stderr": 0.017122829143292648, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.017182864434998564 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-Instruct-v0.3-pre", + "model_sha": "273566d120a8db90bc734aba20ef6e553ed9a9ab", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-Instruct-v0.3-pre2/result_2023-10-29 03:09:00.json b/maywell/Synatra-7B-Instruct-v0.3-pre2/result_2023-10-29 03:09:00.json new file mode 100644 index 0000000000000000000000000000000000000000..9e6a81783a63417afac190ba48bbf960a9fe510b --- /dev/null +++ b/maywell/Synatra-7B-Instruct-v0.3-pre2/result_2023-10-29 03:09:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41467576791808874, + "acc_stderr": 0.01439707056440917, + "acc_norm": 0.46075085324232085, + "acc_norm_stderr": 0.014566303676636588 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40440151364270066, + "acc_stderr": 0.0048977283707372365, + "acc_norm": 0.5306711810396335, + "acc_norm_stderr": 0.004980384575535378 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299794, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299794 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.031410821975962386, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.031410821975962386 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056128, + 
"acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846475, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846475 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606649, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606649 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42328042328042326, + "acc_stderr": 0.025446365634406776, + "acc_norm": 0.42328042328042326, + "acc_norm_stderr": 0.025446365634406776 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + 
"acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5308641975308642, + "acc_stderr": 0.027767689606833918, + "acc_norm": 0.5308641975308642, + "acc_norm_stderr": 0.027767689606833918 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353996, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353996 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477752, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477752 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176853, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176853 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.18659217877094972, + "acc_stderr": 0.013029631416358352, + "acc_norm": 0.18659217877094972, + "acc_norm_stderr": 0.013029631416358352 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 
0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.031137304297185798, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.031137304297185798 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.012238615750316506, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.012238615750316506 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386844, + "mc2": 0.4635110137476736, + "mc2_stderr": 0.015409043308668458 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42857142857142855, + "acc_stderr": 0.017014038119297484, + "acc_norm": 0.4651711924439197, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 
1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-Instruct-v0.3-pre2", + "model_sha": "a1d319cbfba59887acde520207c79d8057711a13", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-Instruct-v0.3/result_2023-10-29 07:25:02.json b/maywell/Synatra-7B-Instruct-v0.3/result_2023-10-29 07:25:02.json new file mode 100644 index 0000000000000000000000000000000000000000..f9a8423dfa5b3c3cda68300a2e60411ea203d039 --- /dev/null +++ b/maywell/Synatra-7B-Instruct-v0.3/result_2023-10-29 07:25:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180635, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836357 + }, + "harness|ko_hellaswag|10": { + "acc": 0.391256721768572, + "acc_stderr": 0.004870342592915051, + "acc_norm": 0.5191196972714599, + "acc_norm_stderr": 0.004986131919673969 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394216, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 
0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539743, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4967741935483871, + "acc_stderr": 0.02844341422643833, + "acc_norm": 0.4967741935483871, + "acc_norm_stderr": 0.02844341422643833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 
0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.02535574126305527, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.02535574126305527 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756653, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.021432956203453313, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.021432956203453313 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.019576953122088837, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.019576953122088837 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275941, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275941 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19776536312849163, + "acc_stderr": 0.013321620594050947, + "acc_norm": 0.19776536312849163, + "acc_norm_stderr": 0.013321620594050947 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031218, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031218 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3285528031290743, + "acc_stderr": 0.01199602724750293, + "acc_norm": 0.3285528031290743, + "acc_norm_stderr": 0.01199602724750293 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627877, + "mc2": 0.4513846769181087, + "mc2_stderr": 0.015331162068993385 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.34238488783943327, + "acc_stderr": 0.016313907844146373, + "acc_norm": 0.39315230224321135, + "acc_norm_stderr": 0.016793262801287078 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-Instruct-v0.3", + "model_sha": "2d31bde8f1bfedb47c3761918b6e3189e3f61acd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-v0.3-QA/result_2023-12-26 07:29:10.json b/maywell/Synatra-7B-v0.3-QA/result_2023-12-26 07:29:10.json new file mode 100644 index 0000000000000000000000000000000000000000..45ac6c95cbca7cd4692d58f5196b1cf790e008a2 --- /dev/null +++ b/maywell/Synatra-7B-v0.3-QA/result_2023-12-26 07:29:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4180887372013652, + "acc_stderr": 0.014413988396996076, + "acc_norm": 0.46757679180887374, + "acc_norm_stderr": 0.01458063756999542 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4078868751244772, + "acc_stderr": 0.004904375631128869, + "acc_norm": 0.5302728540131448, + "acc_norm_stderr": 0.004980627287147575 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.03833185275213025, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.03833185275213025 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.0177122289392998, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.0177122289392998 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.03343577705583065, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.03343577705583065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376556, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376556 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.032473902765696686, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.032473902765696686 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933917, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933917 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41534391534391535, + "acc_stderr": 0.02537952491077838, + "acc_norm": 0.41534391534391535, + "acc_norm_stderr": 0.02537952491077838 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353996, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353996 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225868, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225868 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + 
"acc_stderr": 0.019848280168401154, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401154 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639882, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489122, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489122 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289804, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.18324022346368715, + "acc_stderr": 0.01293864561306638, + "acc_norm": 0.18324022346368715, + "acc_norm_stderr": 0.01293864561306638 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411945, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411945 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.03058732629470236, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.03058732629470236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34028683181225555, + "acc_stderr": 0.012101217610223793, + "acc_norm": 0.34028683181225555, + "acc_norm_stderr": 0.012101217610223793 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.47247339232752167, + "mc2_stderr": 0.015527772167329246 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.0169835060795776, + "acc_norm": 0.4427390791027155, + "acc_norm_stderr": 0.01707725413155622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-v0.3-QA", + "model_sha": "28bb95667c88f4c80b3903cfb0c3a7433f821311", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-v0.3-Translation/result_2023-11-18 00:55:26.json b/maywell/Synatra-7B-v0.3-Translation/result_2023-11-18 00:55:26.json new file mode 100644 index 0000000000000000000000000000000000000000..8ddab9f79c320d2c9118054e1ef177b3f086a564 --- /dev/null +++ b/maywell/Synatra-7B-v0.3-Translation/result_2023-11-18 00:55:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3361774744027304, + "acc_stderr": 0.01380485502620576, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34993029277036447, + "acc_stderr": 0.004759729267943188, + "acc_norm": 0.45498904600677154, + "acc_norm_stderr": 0.004969521827957945 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5236270753512133, + "acc_stderr": 0.017859989765176457, + "acc_norm": 0.5236270753512133, + "acc_norm_stderr": 0.017859989765176457 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.0424463323835323, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.0424463323835323 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.028173917761762875, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.028173917761762875 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938145, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938145 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.03035152732334495, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.03035152732334495 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + 
"acc_stderr": 0.0302422338008545, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.0302422338008545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228412, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228412 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596423, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596423 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.407514450867052, + "acc_stderr": 0.026454578146931494, + "acc_norm": 0.407514450867052, + "acc_norm_stderr": 0.026454578146931494 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413317, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413317 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4, + "acc_stderr": 0.021004201260420075, + "acc_norm": 0.4, + "acc_norm_stderr": 0.021004201260420075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424523, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424523 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 
0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.019469518221573695, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.019469518221573695 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626964, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.029886910547626964 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2770949720670391, + "acc_stderr": 0.014968772435812145, + "acc_norm": 0.2770949720670391, + "acc_norm_stderr": 0.014968772435812145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.026679252270103135, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.026679252270103135 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163909, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163909 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823063004, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823063004 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22276621787025705, + "mc1_stderr": 0.014566506961396756, + "mc2": 0.3845751039570116, + "mc2_stderr": 0.015265157059591356 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30814639905548996, + "acc_stderr": 0.01587451515629839, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.016819438642971408 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-v0.3-Translation", + "model_sha": "fab3f68b4fb414d481167677d660e0fc29a47ec4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-v0.3-dpo/result_2023-11-08 10:27:00.json b/maywell/Synatra-7B-v0.3-dpo/result_2023-11-08 10:27:00.json new file mode 100644 index 0000000000000000000000000000000000000000..f3ab91d5bb76dcaab4acf093f4d30acb3e8e12ec --- /dev/null +++ b/maywell/Synatra-7B-v0.3-dpo/result_2023-11-08 10:27:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4180887372013652, + "acc_stderr": 0.014413988396996074, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007107 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40659231228838877, + "acc_stderr": 0.00490193651154613, + "acc_norm": 0.5323640709022107, + "acc_norm_stderr": 0.004979317515432522 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 
0.038110796698335316, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5798212005108557, + "acc_stderr": 0.017650651363078033, + "acc_norm": 0.5798212005108557, + "acc_norm_stderr": 0.017650651363078033 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977112, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977112 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.028397944907806612, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.028397944907806612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.032473902765696686, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.032473902765696686 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + 
"acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404904, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.025279850397404904 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.02683080599895223, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.02683080599895223 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5504587155963303, + "acc_stderr": 0.021327881417823366, + "acc_norm": 0.5504587155963303, + "acc_norm_stderr": 0.021327881417823366 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061177, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21340782122905028, + "acc_stderr": 0.013702859932196098, + "acc_norm": 0.21340782122905028, + "acc_norm_stderr": 0.013702859932196098 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928006, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928006 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34485006518904826, + "acc_stderr": 0.012139881006287063, + "acc_norm": 0.34485006518904826, + "acc_norm_stderr": 0.012139881006287063 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34394124847001223, + "mc1_stderr": 0.016629087514276754, + "mc2": 0.5118549299169702, + "mc2_stderr": 0.015922621928954366 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.017107618859549353, + "acc_norm": 0.4639905548996458, + "acc_norm_stderr": 0.017145715365486657 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-v0.3-dpo", + "model_sha": "405a4f1e6513cd1b8de5eb4e003bb49cc86d1f8a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-RP-Orca-2-7b-v0.1/result_2023-11-25 01:18:50.json b/maywell/Synatra-RP-Orca-2-7b-v0.1/result_2023-11-25 01:18:50.json new file mode 100644 index 0000000000000000000000000000000000000000..c5724c031e4c4f706d103d5236b921f2ddc1a6f2 --- /dev/null +++ b/maywell/Synatra-RP-Orca-2-7b-v0.1/result_2023-11-25 01:18:50.json @@ -0,0 
+1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3003412969283277, + "acc_stderr": 0.013395909309957, + "acc_norm": 0.34726962457337884, + "acc_norm_stderr": 0.013913034529620442 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3313085042820155, + "acc_stderr": 0.004697217912462989, + "acc_norm": 0.39762995419239194, + "acc_norm_stderr": 0.004884079750433874 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4112388250319285, + "acc_stderr": 0.017595971908056573, + "acc_norm": 0.4112388250319285, + "acc_norm_stderr": 0.017595971908056573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36977491961414793, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.36977491961414793, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262971, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262971 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36923076923076925, + "acc_stderr": 0.024468615241478902, + "acc_norm": 0.36923076923076925, + "acc_norm_stderr": 0.024468615241478902 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998573, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998573 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3419354838709677, + "acc_stderr": 0.02698528957655274, + "acc_norm": 0.3419354838709677, + "acc_norm_stderr": 0.02698528957655274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.38113207547169814, + "acc_stderr": 0.029890609686286616, + "acc_norm": 0.38113207547169814, + "acc_norm_stderr": 0.029890609686286616 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.046313813194254635, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.046313813194254635 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683522, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683522 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.02723741509459247, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.02723741509459247 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + 
"acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008585, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489359, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489359 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3743119266055046, + "acc_stderr": 0.020748959408988313, + "acc_norm": 0.3743119266055046, + "acc_norm_stderr": 0.020748959408988313 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110317, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110317 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.018550634502952964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.018550634502952964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000534, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966346, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966346 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4767932489451477, + "acc_stderr": 0.03251215201141018, + "acc_norm": 0.4767932489451477, + "acc_norm_stderr": 0.03251215201141018 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2803129074315515, + "acc_stderr": 0.011471555944958623, + "acc_norm": 0.2803129074315515, + "acc_norm_stderr": 0.011471555944958623 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + 
"acc_stderr": 0.034602283272391704, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.034602283272391704 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326902, + "mc2": 0.45578640975104057, + "mc2_stderr": 0.016180400915364303 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3270365997638725, + "acc_stderr": 0.016129047485457022, + "acc_norm": 0.3565525383707202, + "acc_norm_stderr": 0.01646770698152745 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-RP-Orca-2-7b-v0.1", + "model_sha": "da80bc823c407c28c464cc0547a8ed9e0ca82f79", + "model_dtype": "torch.float16", + "lighteval_sha": "", 
+ "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-V0.1-7B-Instruct/result_2023-10-10 09:40:24.json b/maywell/Synatra-V0.1-7B-Instruct/result_2023-10-10 09:40:24.json new file mode 100644 index 0000000000000000000000000000000000000000..b5f21bbbe65fa550bef7d9fd527b8d292fd37f41 --- /dev/null +++ b/maywell/Synatra-V0.1-7B-Instruct/result_2023-10-10 09:40:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3609215017064846, + "acc_stderr": 0.014034761386175452, + "acc_norm": 0.41723549488054607, + "acc_norm_stderr": 0.01440982551840308 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3813981278629755, + "acc_stderr": 0.004847372670134637, + "acc_norm": 0.49283011352320255, + "acc_norm_stderr": 0.004989268362968721 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.04931801994220416, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.04931801994220416 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468544, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + 
"acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461224, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461224 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808086, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808086 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833713, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833713 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983067, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983067 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03942082639927213, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03942082639927213 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.02690290045866664, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.02690290045866664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 
0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422697, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422697 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384486, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.02141099975363592, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.02141099975363592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981749, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981749 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35130718954248363, + "acc_stderr": 0.01931267606578656, + "acc_norm": 0.35130718954248363, + "acc_norm_stderr": 0.01931267606578656 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475363, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475363 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.02916312857067073, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.02916312857067073 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 
0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.011849234291459329, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.011849234291459329 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.43748297535795655, + "mc2_stderr": 0.015378495166878805 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3530106257378985, + "acc_stderr": 0.01643074598242713, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.0170725258755631 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-V0.1-7B-Instruct", + "model_sha": "ad4a0c24363b0b0b12f883c7e9e2b7d3c0667fb7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-Yi-Ko-6B/result_2023-12-04 20:13:32.json b/maywell/Synatra-Yi-Ko-6B/result_2023-12-04 20:13:32.json new file mode 100644 index 0000000000000000000000000000000000000000..58cb1414b0032f7a2fe048fdc0fbce12501b91ed --- /dev/null +++ b/maywell/Synatra-Yi-Ko-6B/result_2023-12-04 20:13:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3575085324232082, + "acc_stderr": 0.014005494275916576, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37731527584146585, + "acc_stderr": 0.004837242015191119, + "acc_norm": 0.4814777932682733, + "acc_norm_stderr": 0.004986356526063965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5874840357598978, + "acc_stderr": 0.01760414910867193, + "acc_norm": 0.5874840357598978, + "acc_norm_stderr": 0.01760414910867193 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.038743715565879536, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.038743715565879536 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.028443414226438306, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.028443414226438306 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230175, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230175 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 
0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579858, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579858 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.020748959408988316, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.020748959408988316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42320261437908496, + "acc_stderr": 0.01998780976948206, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.01998780976948206 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260657, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260657 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + 
"acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016633, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016633 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811224, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811224 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.011971507294982784, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.011971507294982784 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.0163226441829605, + "mc2": 0.47037314932231167, + "mc2_stderr": 0.015667471817844107 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3907910271546635, + "acc_stderr": 0.01677529846510825, + "acc_norm": 0.47461629279811096, + "acc_norm_stderr": 0.017168187201429246 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-Yi-Ko-6B", + "model_sha": "3a572e4ac6159199eec6844716963088b74330de", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-Zephyr-7B-v0.01/result_2023-11-01 00:27:10.json b/maywell/Synatra-Zephyr-7B-v0.01/result_2023-11-01 00:27:10.json new file mode 100644 index 0000000000000000000000000000000000000000..cd78023d75d8512be674fb2a90696f43872654f8 --- /dev/null +++ b/maywell/Synatra-Zephyr-7B-v0.01/result_2023-11-01 00:27:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910473, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.01449442158425651 + }, + "harness|ko_hellaswag|10": { + "acc": 0.393945429197371, + "acc_stderr": 0.004876243842318603, + "acc_norm": 0.5140410276837284, + "acc_norm_stderr": 0.004987813548019091 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 0.01787924897058439, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.01787924897058439 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 
0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49743589743589745, + "acc_stderr": 0.025350672979412205, + "acc_norm": 0.49743589743589745, + "acc_norm_stderr": 0.025350672979412205 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389174, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389174 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815632, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815632 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658752, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658752 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.02501074911613759, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.02501074911613759 + }, 
+ "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005135, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171566, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171566 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.01941253924203216, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.01941253924203216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966734, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966734 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.2782122905027933, + "acc_stderr": 0.014987325439963554, + "acc_norm": 0.2782122905027933, + "acc_norm_stderr": 0.014987325439963554 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.01198381980646477, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.01198381980646477 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247272, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247272 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.01613222972815506, + "mc2": 0.4754957310754645, + "mc2_stderr": 0.01547689288911229 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3907910271546635, + "acc_stderr": 0.01677529846510825, + "acc_norm": 0.4309327036599764, + "acc_norm_stderr": 0.017025558196043136 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, 
+ "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-Zephyr-7B-v0.01", + "model_sha": "d6fba97659714b6fcb81b15acb9b5729ffada374", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-Zephyr-7B-v0.02/result_2023-11-02 02:28:13.json b/maywell/Synatra-Zephyr-7B-v0.02/result_2023-11-02 02:28:13.json new file mode 100644 index 0000000000000000000000000000000000000000..0ec9c41d7291f0da31a487c8fd7bb16d515941dd --- /dev/null +++ b/maywell/Synatra-Zephyr-7B-v0.02/result_2023-11-02 02:28:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3993174061433447, + "acc_stderr": 0.0143120945579467, + "acc_norm": 0.4616040955631399, + "acc_norm_stderr": 0.014568245550296356 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3943437562238598, + "acc_stderr": 0.004877104939356235, + "acc_norm": 0.5110535749850628, + "acc_norm_stderr": 0.004988561944277397 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5070242656449553, + "acc_stderr": 0.017878199003432217, + "acc_norm": 0.5070242656449553, + "acc_norm_stderr": 0.017878199003432217 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 
0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349472, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + 
"acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.02501074911613759, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.02501074911613759 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.02677299065336182, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.02677299065336182 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.027563010971606676, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.027563010971606676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5064220183486239, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.5064220183486239, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171566, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171566 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449845, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449845 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320207, + "acc_norm": 0.3191489361702128, + 
"acc_norm_stderr": 0.027807990141320207 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22905027932960895, + "acc_stderr": 0.014054314935614553, + "acc_norm": 0.22905027932960895, + "acc_norm_stderr": 0.014054314935614553 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2977941176470588, + "acc_stderr": 0.027778298701545443, + "acc_norm": 0.2977941176470588, + "acc_norm_stderr": 0.027778298701545443 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214933, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214933 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394816, + "mc2": 0.4569644659417388, + "mc2_stderr": 0.01530492284436498 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42384887839433294, + "acc_stderr": 0.016989810834628256, + "acc_norm": 0.4592680047225502, + "acc_norm_stderr": 0.017133218276537673 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-Zephyr-7B-v0.02", + "model_sha": "5b6d8ea1b45ed20c3ecf0ddb6cc41141250352ca", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra_TbST02M_IN01/result_2023-10-16 09:50:44.json b/maywell/Synatra_TbST02M_IN01/result_2023-10-16 09:50:44.json new file mode 100644 index 0000000000000000000000000000000000000000..9d580520178c1f1e95d1221a9d99b9033dd0679b --- /dev/null +++ b/maywell/Synatra_TbST02M_IN01/result_2023-10-16 09:50:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3097269624573379, + "acc_stderr": 0.01351205841523836, + "acc_norm": 0.38310580204778155, + "acc_norm_stderr": 0.014206472661672877 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35331607249551883, + "acc_stderr": 0.004770229206838901, + "acc_norm": 0.4451304521011751, + "acc_norm_stderr": 0.004959645263390238 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49169859514687103, + "acc_stderr": 0.017877498991072008, + "acc_norm": 0.49169859514687103, + "acc_norm_stderr": 0.017877498991072008 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + 
"acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.03011821010694265, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.03011821010694265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.03528131472933607, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.03528131472933607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.023973861998992062, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.023973861998992062 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5064220183486239, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.5064220183486239, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 
0.040260970832965585, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.040260970832965585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.01950629169395486, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.01950629169395486 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.033851779760448106, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.033851779760448106 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3039106145251397, + "acc_stderr": 0.01538284558758452, + "acc_norm": 0.3039106145251397, + "acc_norm_stderr": 0.01538284558758452 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.01171714875164844, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.01171714875164844 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33047735618115054, + "mc1_stderr": 0.016466769613698293, + "mc2": 0.5058685155948915, + "mc2_stderr": 0.01583111147395693 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29043683589138136, + "acc_stderr": 0.01560760256981463, + "acc_norm": 0.38134592680047225, + "acc_norm_stderr": 0.01669930176882808 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, 
+ "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra_TbST02M_IN01", + "model_sha": "ba8eef9720471e65dc86e856d2a3812da8b53527", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra_TbST11B_EP01/result_2023-10-18 07:35:40.json b/maywell/Synatra_TbST11B_EP01/result_2023-10-18 07:35:40.json new file mode 100644 index 0000000000000000000000000000000000000000..3768b9cd40b203d165bdf3045bf51101b0ef7856 --- /dev/null +++ b/maywell/Synatra_TbST11B_EP01/result_2023-10-18 07:35:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179344, + "acc_norm": 0.40784982935153585, + "acc_norm_stderr": 0.014361097288449691 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36875124477195775, + "acc_stderr": 0.0048148030984368085, + "acc_norm": 0.4722166899024099, + "acc_norm_stderr": 0.004982072108448084 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.4840357598978289, + "acc_stderr": 0.01787084750608173, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.01787084750608173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.03777798822748016, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.03777798822748016 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.045338381959297736, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.045338381959297736 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.02818173972001941, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.02818173972001941 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138653, + "acc_norm": 
0.6666666666666666, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.030767394707808107, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.030767394707808107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815642, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815642 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602841997, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602841997 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362223, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362223 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147124, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147124 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 
0.02856869975222588 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483184, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483184 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529658, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529658 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534792, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534792 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23016759776536314, + "acc_stderr": 0.014078339253425807, + "acc_norm": 0.23016759776536314, + "acc_norm_stderr": 0.014078339253425807 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3155149934810952, + "acc_stderr": 0.011869184843058643, + "acc_norm": 0.3155149934810952, + "acc_norm_stderr": 0.011869184843058643 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326912, + "mc2": 0.4484601943910918, + "mc2_stderr": 0.015458891626438749 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3482880755608028, + "acc_stderr": 0.01637992673914804, + "acc_norm": 0.39787485242030696, + "acc_norm_stderr": 0.016827959054733388 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, 
+ "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra_TbST11B_EP01", + "model_sha": "76b40393481aa567733eff5107dd4b6944862fdd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/koOpenChat-sft/result_2023-11-14 13:43:42.json b/maywell/koOpenChat-sft/result_2023-11-14 13:43:42.json new file mode 100644 index 0000000000000000000000000000000000000000..28bd2ede500410d61b52f46d41cb1714f99f3a48 --- /dev/null +++ b/maywell/koOpenChat-sft/result_2023-11-14 13:43:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39419795221843, + "acc_stderr": 0.014280522667467328, + "acc_norm": 0.44880546075085326, + "acc_norm_stderr": 0.014534599585097669 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37860983867755427, + "acc_stderr": 0.004840493603166217, + "acc_norm": 0.4903405696076479, + "acc_norm_stderr": 0.004988850185477489 + }, + "harness|ko_mmlu_world_religions|5": { + 
"acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5044699872286079, + "acc_stderr": 0.017879248970584356, + "acc_norm": 0.5044699872286079, + "acc_norm_stderr": 0.017879248970584356 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079023, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5434083601286174, + "acc_stderr": 0.028290869054197598, + "acc_norm": 0.5434083601286174, + "acc_norm_stderr": 0.028290869054197598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234353, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234353 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 
0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.02891120880274946, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.02891120880274946 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808086, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808086 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.029560707392465718, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.029560707392465718 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.035339990940656964, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.035339990940656964 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5559633027522936, + "acc_stderr": 0.021302621211654518, + "acc_norm": 0.5559633027522936, 
+ "acc_norm_stderr": 0.021302621211654518 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477752, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477752 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.01414957534897627, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.01414957534897627 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411127, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411127 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741523, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741523 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236614, + "mc2": 0.4711085048103087, + "mc2_stderr": 0.015340563202217064 + 
}, + "harness|ko_commongen_v2|2": { + "acc": 0.32113341204250295, + "acc_stderr": 0.016052762579111573, + "acc_norm": 0.36835891381345925, + "acc_norm_stderr": 0.016583858982639074 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/koOpenChat-sft", + "model_sha": "47472b36e181694422564b130ee075ffa596537d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/ko_ocgn_ep0-4/result_2023-11-11 23:51:11.json b/maywell/ko_ocgn_ep0-4/result_2023-11-11 23:51:11.json new file mode 100644 index 0000000000000000000000000000000000000000..413674674ea4c8697176915c1d5b6cf3c832ab79 --- /dev/null +++ b/maywell/ko_ocgn_ep0-4/result_2023-11-11 23:51:11.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142818, + "acc_norm": 0.43600682593856654, + "acc_norm_stderr": 0.014491225699230916 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36058554072893845, + "acc_stderr": 0.0047918906258342, + "acc_norm": 0.45558653654650466, + "acc_norm_stderr": 0.004970057183367316 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.01787469866749135, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.01787469866749135 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.02804339985821063, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.02804339985821063 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.02508830145469484, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.02508830145469484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 
0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112728, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112728 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942638, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942638 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.02882088466625326, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.02882088466625326 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379414, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379414 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.03602573571288442, + "acc_norm": 
0.5284974093264249, + "acc_norm_stderr": 0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5229357798165137, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.5229357798165137, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.01957695312208884, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.01957695312208884 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614095, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21452513966480447, + "acc_stderr": 0.013728923407828846, + "acc_norm": 0.21452513966480447, + "acc_norm_stderr": 0.013728923407828846 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.011940264193195983, + "acc_norm": 0.3226857887874837, + "acc_norm_stderr": 0.011940264193195983 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015473, + "acc_norm": 
0.45588235294117646, + "acc_norm_stderr": 0.03495624522015473 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713613, + "mc2": 0.47666728212495557, + "mc2_stderr": 0.015425276853126389 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3789846517119244, + "acc_stderr": 0.01667926068422929, + "acc_norm": 0.40968122786304606, + "acc_norm_stderr": 0.016907568192219474 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/ko_ocgn_ep0-4", + "model_sha": "77ac30be06896d5a51a90fe5c36bcab90b26f202", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/ko_ocgn_ep1/result_2023-11-12 23:44:36.json b/maywell/ko_ocgn_ep1/result_2023-11-12 23:44:36.json new file mode 100644 index 0000000000000000000000000000000000000000..19d5e92304ed5aebdf0db5a79f1c703ef5be5f49 --- /dev/null +++ b/maywell/ko_ocgn_ep1/result_2023-11-12 23:44:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180639, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 0.014553749939306863 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37661820354511055, + "acc_stderr": 0.004835475957610931, + "acc_norm": 0.4816769567815176, + "acc_norm_stderr": 0.004986429808146771 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 0.017879248970584384, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.017879248970584384 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016339, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016339 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.03243718055137411 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.02974504857267408, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.02974504857267408 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.02497695405315525, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.02497695405315525 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 
0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.0358701498607566 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5522935779816514, + "acc_stderr": 0.021319754962425455, + "acc_norm": 0.5522935779816514, + "acc_norm_stderr": 0.021319754962425455 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556054, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556054 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094597, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094597 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650158, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650158 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.01450897945355398, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.01450897945355398 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776132, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776132 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 
0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.012084265626344211, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.012084265626344211 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.4458984547809422, + "mc2_stderr": 0.015245099119242699 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3872491145218418, + "acc_stderr": 0.01674757799164279, + "acc_norm": 0.4167650531286895, + "acc_norm_stderr": 0.016950489146108826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/ko_ocgn_ep1", + "model_sha": "ebbb63249672295f49e791b11c7204582bfb5383", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/synatra_V0.01/result_2023-10-07 13:31:59.json b/maywell/synatra_V0.01/result_2023-10-07 13:31:59.json new file mode 100644 index 0000000000000000000000000000000000000000..be02eab2a5a952083cd5d7c790b14a8dae4d02bc --- /dev/null +++ b/maywell/synatra_V0.01/result_2023-10-07 13:31:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2901023890784983, + "acc_stderr": 0.013261573677520769, + "acc_norm": 0.3412969283276451, + "acc_norm_stderr": 0.01385583128749772 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33210515833499304, + "acc_stderr": 0.00470005967137463, + "acc_norm": 0.41585341565425216, + "acc_norm_stderr": 0.004918612098944034 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3997445721583653, + "acc_stderr": 0.01751684790705327, + "acc_norm": 0.3997445721583653, + "acc_norm_stderr": 0.01751684790705327 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610334, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610334 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.02801365189199507, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.02801365189199507 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34080717488789236, + "acc_stderr": 0.03181149747055359, + "acc_norm": 0.34080717488789236, + "acc_norm_stderr": 0.03181149747055359 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 
0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182087, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182087 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38974358974358975, + "acc_stderr": 0.024726967886647074, + "acc_norm": 0.38974358974358975, + "acc_norm_stderr": 0.024726967886647074 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.027666182075539635, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.027666182075539635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.047245774057315705, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.047245774057315705 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815642, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815642 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.02455229220934266, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.02455229220934266 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.36419753086419754, + "acc_stderr": 0.026774929899722327, + "acc_norm": 0.36419753086419754, + "acc_norm_stderr": 0.026774929899722327 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4055045871559633, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.4055045871559633, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.02778014120702333, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.02778014120702333 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.018875682938069443, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.018875682938069443 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3106145251396648, + "acc_stderr": 0.015476515438005566, + "acc_norm": 0.3106145251396648, + "acc_norm_stderr": 0.015476515438005566 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4767932489451477, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.4767932489451477, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28683181225554105, + "acc_stderr": 0.011551504781176933, + "acc_norm": 0.28683181225554105, + "acc_norm_stderr": 0.011551504781176933 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.03713158067481913, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.03713158067481913 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32313341493268055, + "mc1_stderr": 0.016371836286454614, + "mc2": 0.4992370707389853, + "mc2_stderr": 0.01568220201461622 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3293978748524203, + "acc_stderr": 0.016158746868147143, + "acc_norm": 0.43211334120425027, + "acc_norm_stderr": 0.017031170198851753 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/synatra_V0.01", + "model_sha": "c27df4dbc7624ea0bcbf0b0ff149d49b58713a4e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/metterian/llama-2-7b-pt/result_2023-11-24 06:52:27.json b/metterian/llama-2-7b-pt/result_2023-11-24 06:52:27.json new file mode 100644 index 0000000000000000000000000000000000000000..218a6624b47f5f075e4deee0b5594e455ffa5799 --- /dev/null +++ b/metterian/llama-2-7b-pt/result_2023-11-24 06:52:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2022184300341297, + "acc_stderr": 0.011737454431872105, + "acc_norm": 0.2354948805460751, + "acc_norm_stderr": 0.012399451855004753 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25144393547102173, + "acc_stderr": 0.0043295650165273165, + "acc_norm": 0.2604062935670185, + "acc_norm_stderr": 0.004379594059141041 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245232, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245232 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777562, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777562 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.03455473702325438, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03455473702325438 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039776, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039776 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064536, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064536 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2508038585209003, + "acc_stderr": 0.024619771956697168, + "acc_norm": 0.2508038585209003, + "acc_norm_stderr": 0.024619771956697168 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.02944249558585747, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.02944249558585747 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969195, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969195 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 
0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18686868686868688, + "acc_stderr": 0.027772533334218967, + "acc_norm": 0.18686868686868688, + "acc_norm_stderr": 0.027772533334218967 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774632, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774632 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.18907563025210083, + "acc_stderr": 0.02543511943810535, + "acc_norm": 0.18907563025210083, + "acc_norm_stderr": 0.02543511943810535 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2153846153846154, + "acc_stderr": 0.020843034557462874, + "acc_norm": 0.2153846153846154, + "acc_norm_stderr": 0.020843034557462874 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.04453197507374983, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.04453197507374983 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.02850137816789395, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.02850137816789395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1967741935483871, + "acc_stderr": 0.022616409420742025, + "acc_norm": 0.1967741935483871, + "acc_norm_stderr": 0.022616409420742025 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276613, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276613 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.03336767086567977, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.03336767086567977 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1791907514450867, + "acc_stderr": 0.029242513059063287, + "acc_norm": 0.1791907514450867, + "acc_norm_stderr": 0.029242513059063287 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.02344582627654554, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.02344582627654554 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.20245398773006135, + "acc_stderr": 0.03157065078911901, + "acc_norm": 0.20245398773006135, + "acc_norm_stderr": 0.03157065078911901 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24691358024691357, + "acc_stderr": 0.02399350170904211, + "acc_norm": 0.24691358024691357, + "acc_norm_stderr": 0.02399350170904211 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.18652849740932642, + "acc_stderr": 0.02811209121011746, + "acc_norm": 0.18652849740932642, + "acc_norm_stderr": 0.02811209121011746 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1944954128440367, + "acc_stderr": 0.016970289090458043, + "acc_norm": 0.1944954128440367, + "acc_norm_stderr": 0.016970289090458043 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.02355083135199509, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.02355083135199509 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.040261875275912046, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.040261875275912046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.031103182383123377, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.031103182383123377 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.02646903681859062, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.02646903681859062 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.02453632602613424, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.02453632602613424 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21899441340782122, + "acc_stderr": 
0.013831676687303198, + "acc_norm": 0.21899441340782122, + "acc_norm_stderr": 0.013831676687303198 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.0242310133705411, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.0242310133705411 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.02412746346265015, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.02412746346265015 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.010865436690780262, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.010865436690780262 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251728, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251728 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.03158415324047711, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.03158415324047711 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22399020807833536, + "mc1_stderr": 0.014594964329474205, + "mc2": 0.4652172706232095, + "mc2_stderr": 0.016830514037368852 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07792207792207792, + "acc_stderr": 0.009215711972304702, + "acc_norm": 0.2538370720188902, + "acc_norm_stderr": 0.014962672739769986 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "metterian/llama-2-7b-pt", + "model_sha": "92ac9b01be1d6c949d56eb45b4a25e0103d4d31e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/metterian/llama-2-ko-7b-pt/result_2023-11-25 16:20:36.json b/metterian/llama-2-ko-7b-pt/result_2023-11-25 16:20:36.json new file mode 100644 index 0000000000000000000000000000000000000000..9745efa26acd77620375a921d8067d393ec4396e --- /dev/null +++ b/metterian/llama-2-ko-7b-pt/result_2023-11-25 16:20:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25426621160409557, + "acc_stderr": 0.012724999945157736, + "acc_norm": 0.3122866894197952, + "acc_norm_stderr": 0.013542598541688065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3370842461661024, + "acc_stderr": 0.004717478335689621, + "acc_norm": 0.41565425214100776, + "acc_norm_stderr": 0.004918272352137549 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03188578017686398, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03188578017686398 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.0376017800602662, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.0376017800602662 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2848020434227331, + "acc_stderr": 0.016139174096522574, + "acc_norm": 0.2848020434227331, + "acc_norm_stderr": 0.016139174096522574 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380056, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380056 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998483, + 
"acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.02558306248998483 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289202, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289202 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.19083969465648856, + "acc_stderr": 0.03446513350752599, + "acc_norm": 0.19083969465648856, + "acc_norm_stderr": 0.03446513350752599 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2, + "acc_stderr": 0.0333333333333333, + "acc_norm": 0.2, + "acc_norm_stderr": 0.0333333333333333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886838, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886838 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3435897435897436, + "acc_stderr": 0.02407869658063548, + "acc_norm": 0.3435897435897436, + "acc_norm_stderr": 0.02407869658063548 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.02598850079241188, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.02598850079241188 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33584905660377357, + "acc_stderr": 0.029067220146644826, + "acc_norm": 0.33584905660377357, + "acc_norm_stderr": 0.029067220146644826 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.03096590312357303, + "acc_norm": 0.25870646766169153, + 
"acc_norm_stderr": 0.03096590312357303 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.023703099525258176, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.023703099525258176 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.033220157957767414, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.033220157957767414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32642487046632124, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.32642487046632124, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26055045871559634, + "acc_stderr": 0.01881918203485007, + "acc_norm": 0.26055045871559634, + "acc_norm_stderr": 0.01881918203485007 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.02367908986180772, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.02367908986180772 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.042943408452120926, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.042943408452120926 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.01703522925803403, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.01703522925803403 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, 
+ "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.03119223072679566, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.03119223072679566 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.01086543669078026, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.01086543669078026 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2107843137254902, + "acc_stderr": 0.02862654791243739, + "acc_norm": 0.2107843137254902, + "acc_norm_stderr": 0.02862654791243739 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.03256866661681102, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.03256866661681102 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715008, + "mc2": 0.40455212863354156, + "mc2_stderr": 0.015092027959978592 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30342384887839435, + "acc_stderr": 0.015806072717909576, + "acc_norm": 0.3730814639905549, + "acc_norm_stderr": 0.016627318275137425 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "metterian/llama-2-ko-7b-pt", + "model_sha": "4fc8a514d966d245f4f4f0e8ffca374aadb2b069", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/metterian/polyglot-ko-kullm-v2-fix/result_2023-11-03 06:21:31.json b/metterian/polyglot-ko-kullm-v2-fix/result_2023-11-03 06:21:31.json new file mode 100644 index 0000000000000000000000000000000000000000..89fc54d75bdf01dfceda185f417f116baa2eb271 --- /dev/null +++ b/metterian/polyglot-ko-kullm-v2-fix/result_2023-11-03 06:21:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31313993174061433, + "acc_stderr": 0.013552671543623504, + "acc_norm": 0.34982935153583616, + "acc_norm_stderr": 0.013936809212158284 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38697470623381797, + "acc_stderr": 0.004860623733461129, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249536 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.36257309941520466, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.36257309941520466, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.04453254836326467, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.04453254836326467 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21711366538952745, + "acc_stderr": 0.014743125394823291, + "acc_norm": 0.21711366538952745, + "acc_norm_stderr": 0.014743125394823291 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.03047297336338005, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.03047297336338005 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.0357160923005348, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.0357160923005348 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2508038585209003, + "acc_stderr": 0.02461977195669716, + "acc_norm": 0.2508038585209003, + "acc_norm_stderr": 0.02461977195669716 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.029442495585857473, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.029442495585857473 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.029620227874790482, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.029620227874790482 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361273, + "acc_norm": 0.23949579831932774, + "acc_norm_stderr": 0.027722065493361273 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.02136202772522272, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.02136202772522272 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.02489246917246283, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.02489246917246283 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.02948036054954119, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.02948036054954119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.02700876609070809, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.02700876609070809 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.038950910157241364, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.038950910157241364 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.02504044387700069, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.02504044387700069 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389986, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389986 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.029705284056772436, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772436 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641143, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641143 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.02084229093011466, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.02084229093011466 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322716, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322716 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.25722543352601157, + "acc_stderr": 0.023532925431044287, + "acc_norm": 0.25722543352601157, + "acc_norm_stderr": 0.023532925431044287 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765127, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765127 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19170984455958548, + "acc_stderr": 0.028408953626245282, + "acc_norm": 0.19170984455958548, + "acc_norm_stderr": 0.028408953626245282 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25137614678899084, + "acc_stderr": 0.018599206360287415, + "acc_norm": 0.25137614678899084, + "acc_norm_stderr": 0.018599206360287415 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.0180540274588152, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.0180540274588152 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432403, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103987, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103987 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483924, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866764, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.23628691983122363, + "acc_stderr": 0.02765215314415926, + "acc_norm": 0.23628691983122363, + "acc_norm_stderr": 0.02765215314415926 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25358539765319427, + "acc_stderr": 0.011111715336101136, + "acc_norm": 0.25358539765319427, + "acc_norm_stderr": 0.011111715336101136 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731606, + "mc2": 0.39703809921502775, + "mc2_stderr": 0.01577852528205582 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32231404958677684, + "acc_stderr": 0.016068253615813967, + "acc_norm": 0.3612750885478158, + "acc_norm_stderr": 0.016515463022412 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "metterian/polyglot-ko-kullm-v2-fix", + "model_sha": "cd7387406ec0e3262d718583f5c204ec6493f5e5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/microsoft/Orca-2-7b/result_2023-11-21 08:49:11.json b/microsoft/Orca-2-7b/result_2023-11-21 08:49:11.json new file mode 100644 index 0000000000000000000000000000000000000000..a023ce26d64a78b7c4d222f006887d0dbf509bad --- /dev/null +++ b/microsoft/Orca-2-7b/result_2023-11-21 08:49:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29692832764505117, + "acc_stderr": 0.013352025976725225, + "acc_norm": 0.33361774744027306, + "acc_norm_stderr": 0.013778687054176536 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3337980481975702, + "acc_stderr": 0.004706048116764943, + "acc_norm": 0.398725353515236, + "acc_norm_stderr": 0.004886353563571845 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + 
}, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41890166028097064, + "acc_stderr": 0.01764320505237717, + "acc_norm": 0.41890166028097064, + "acc_norm_stderr": 0.01764320505237717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.38263665594855306, + "acc_stderr": 0.027604689028581993, + "acc_norm": 0.38263665594855306, + "acc_norm_stderr": 0.027604689028581993 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.032443052830087304 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.040287315329475604, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.040287315329475604 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.024756000382130945, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.024756000382130945 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.0302422338008545, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.0302422338008545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547307, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547307 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.026636539741116076, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.026636539741116076 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.027272582849839792, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.027272582849839792 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3779816513761468, + "acc_stderr": 0.020789187066728113, + "acc_norm": 0.3779816513761468, + "acc_norm_stderr": 0.020789187066728113 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, 
+ "acc_stderr": 0.027780141207023327, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.027780141207023327 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.018718067052623223, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.018718067052623223 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.040598672469526864, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.040598672469526864 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012404, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012404 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369922, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369922 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29044117647058826, + "acc_stderr": 0.02757646862274051, + "acc_norm": 0.29044117647058826, + "acc_norm_stderr": 0.02757646862274051 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28748370273794005, + "acc_stderr": 0.011559337355708507, + "acc_norm": 0.28748370273794005, + "acc_norm_stderr": 0.011559337355708507 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.45223449862482046, + "mc2_stderr": 0.016160782909726883 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3187721369539551, + "acc_stderr": 0.016021427055309578, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.016366945603281276 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "microsoft/Orca-2-7b", + "model_sha": "07bbfb8d4e051dbef7a30e5b2cb3816260d917d3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mistralai/Mistral-7B-Instruct-v0.1/result_2023-10-16 06:07:31.json b/mistralai/Mistral-7B-Instruct-v0.1/result_2023-10-16 06:07:31.json new file mode 100644 index 0000000000000000000000000000000000000000..2ae55f2b9e80919e6d6eb1eacd3a3d4f24d0a219 --- /dev/null +++ b/mistralai/Mistral-7B-Instruct-v0.1/result_2023-10-16 06:07:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2645051194539249, + "acc_stderr": 0.012889272949313368, + "acc_norm": 0.32849829351535836, + "acc_norm_stderr": 0.013724978465537378 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.32682732523401714, + "acc_stderr": 0.004680949283855315, + "acc_norm": 0.3868751244771958, + "acc_norm_stderr": 0.004860393011974685 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066163, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066163 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.04931801994220414, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.04931801994220414 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38569604086845466, + "acc_stderr": 0.01740647661921291, + "acc_norm": 0.38569604086845466, + "acc_norm_stderr": 0.01740647661921291 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.036293353299478595, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.036293353299478595 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.027982680459759563, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.027982680459759563 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928276, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03540294377095367, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03540294377095367 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793254, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793254 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830524, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830524 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 
0.04750077341199985, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.033442837442804574, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.033442837442804574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3774193548387097, + "acc_stderr": 0.027575960723278253, + "acc_norm": 0.3774193548387097, + "acc_norm_stderr": 0.027575960723278253 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.031937057262002924, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.031937057262002924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.03047144586718323, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.03047144586718323 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.47761194029850745, + "acc_stderr": 0.035319879302087305, + "acc_norm": 0.47761194029850745, + "acc_norm_stderr": 0.035319879302087305 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972602, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.02642481659400985, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.02642481659400985 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.038020681028996146, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.038020681028996146 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409814, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409814 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + 
"acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3834862385321101, + "acc_stderr": 0.020847156641915984, + "acc_norm": 0.3834862385321101, + "acc_norm_stderr": 0.020847156641915984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351585, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.01863559403442397, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.01863559403442397 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.043642261558410445, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.043642261558410445 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31843575418994413, + "acc_stderr": 0.015581008080360274, + "acc_norm": 0.31843575418994413, + "acc_norm_stderr": 0.015581008080360274 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.032498227183013026, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.032498227183013026 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2966101694915254, + "acc_stderr": 0.011665946586082868, + "acc_norm": 0.2966101694915254, + "acc_norm_stderr": 0.011665946586082868 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03283472056108567, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03283472056108567 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 
0.0372820699868265, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.0372820699868265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3243574051407589, + "mc1_stderr": 0.01638797677964793, + "mc2": 0.49917419306073907, + "mc2_stderr": 0.016202138687957245 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2857142857142857, + "acc_stderr": 0.015531620786986743, + "acc_norm": 0.3565525383707202, + "acc_norm_stderr": 0.016467706981527445 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mistralai/Mistral-7B-Instruct-v0.1", + "model_sha": "7ad5799710574ba1c1d953eba3077af582f3a773", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mistralai/Mistral-7B-v0.1/result_2023-10-06 00:26:01.json 
b/mistralai/Mistral-7B-v0.1/result_2023-10-06 00:26:01.json new file mode 100644 index 0000000000000000000000000000000000000000..32a007d17a99baf43a51f8e3106166ae88d2cd09 --- /dev/null +++ b/mistralai/Mistral-7B-v0.1/result_2023-10-06 00:26:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33532423208191126, + "acc_stderr": 0.01379618294778556, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668526 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3703445528779128, + "acc_stderr": 0.004819100456867818, + "acc_norm": 0.481876120294762, + "acc_norm_stderr": 0.004986502296931182 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468544, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 
0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214338, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142628, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142628 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490435, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.01980828131744984, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.01980828131744984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.03381200005643525, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.03381200005643525 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.34413407821229053, + "acc_stderr": 0.015889221313307094, + "acc_norm": 0.34413407821229053, + "acc_norm_stderr": 0.015889221313307094 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877743, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.03175195237583323, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.03175195237583323 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666535, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.011989936640666535 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.4613168911756529, + "mc2_stderr": 0.015417066073991514 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654273, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mistralai/Mistral-7B-v0.1", + "model_sha": "5e9c98b96d071dce59368012254c55b0ec6f8658", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-alpaca-1k/result_2023-10-22 05:59:52.json b/mncai/Mistral-7B-v0.1-alpaca-1k/result_2023-10-22 05:59:52.json new file mode 100644 index 0000000000000000000000000000000000000000..8d71e8c389d9c24c90f1b3fdb960d6e734884065 --- /dev/null +++ b/mncai/Mistral-7B-v0.1-alpaca-1k/result_2023-10-22 05:59:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3046075085324232, + "acc_stderr": 0.013449522109932487, + "acc_norm": 0.3438566552901024, + "acc_norm_stderr": 0.013880644570156222 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36875124477195775, + "acc_stderr": 0.0048148030984368154, + "acc_norm": 0.4697271459868552, + "acc_norm_stderr": 0.00498062728714758 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.01783579880629064, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.01783579880629064 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079021, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079021 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539743, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561053, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561053 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674078, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.0302422338008545, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.0302422338008545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + 
"acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.027563010971606676, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.027563010971606676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45504587155963305, + "acc_stderr": 0.021350503090925167, + "acc_norm": 0.45504587155963305, + "acc_norm_stderr": 0.021350503090925167 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.041349130183033156, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.041349130183033156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.01962744474841223, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.01962744474841223 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.014333522059217892, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.014333522059217892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 
0.028739328513983583, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983583 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.0325446201076786, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.0325446201076786 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2985658409387223, + "acc_stderr": 0.011688060141794231, + "acc_norm": 0.2985658409387223, + "acc_norm_stderr": 0.011688060141794231 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559693, + "mc2": 0.48747691141114763, + "mc2_stderr": 0.015615664106933899 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.017188904359077318, + "acc_norm": 0.5301062573789846, + "acc_norm_stderr": 0.017159163590170216 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/Mistral-7B-v0.1-alpaca-1k", + "model_sha": "97a2cb89d4f19712842c4e29c44e1b7821905fac", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-combine-1k/result_2023-10-22 06:02:36.json b/mncai/Mistral-7B-v0.1-combine-1k/result_2023-10-22 06:02:36.json new file mode 100644 index 0000000000000000000000000000000000000000..deb0531f4f37afdfe292df2ced5b71341667e647 --- /dev/null +++ b/mncai/Mistral-7B-v0.1-combine-1k/result_2023-10-22 06:02:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.3515358361774744, + "acc_norm_stderr": 0.013952413699600938 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3610834495120494, + "acc_stderr": 0.004793330525656211, + "acc_norm": 0.45120493925512845, + "acc_norm_stderr": 0.004965963647210315 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2503192848020434, + "acc_stderr": 0.01549108895149458, + "acc_norm": 0.2503192848020434, + "acc_norm_stderr": 0.01549108895149458 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.026355158413349424, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.026355158413349424 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789396, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789396 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.1210762331838565, + "acc_stderr": 0.021894174113185737, + "acc_norm": 0.1210762331838565, + "acc_norm_stderr": 0.021894174113185737 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.04039314978724561, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.04039314978724561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 
0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3686868686868687, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.3686868686868687, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.04878608714466996, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.04878608714466996 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052452, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052452 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2094017094017094, + "acc_stderr": 0.026655699653922737, + "acc_norm": 0.2094017094017094, + "acc_norm_stderr": 0.026655699653922737 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443866, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443866 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555401, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555401 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577656, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577656 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22832369942196531, + "acc_stderr": 0.022598703804321624, + "acc_norm": 0.22832369942196531, + "acc_norm_stderr": 0.022598703804321624 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.02357688174400572, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400572 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.37305699481865284, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3559633027522936, + "acc_stderr": 0.020528559278244214, + "acc_norm": 0.3559633027522936, + "acc_norm_stderr": 0.020528559278244214 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279053, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279053 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.016774672365468517, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.016774672365468517 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.02564555362226673, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.02564555362226673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.0356236785009539, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.0356236785009539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 
0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045509, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045509 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.4616568963266555, + "mc2_stderr": 0.01577378737316958 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3659976387249115, + "acc_stderr": 0.01656148966489569, + "acc_norm": 0.44037780401416765, + "acc_norm_stderr": 0.017067699774312987 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/Mistral-7B-v0.1-combine-1k", + "model_sha": "0f7abf5c07a7f3add4c89c9e3525f29ab89be562", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-orca-1k/result_2023-10-22 04:26:18.json b/mncai/Mistral-7B-v0.1-orca-1k/result_2023-10-22 04:26:18.json new file mode 100644 index 0000000000000000000000000000000000000000..45cf736fa59311f9ba26684df5c57087bb34364d --- /dev/null +++ b/mncai/Mistral-7B-v0.1-orca-1k/result_2023-10-22 04:26:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31313993174061433, + "acc_stderr": 0.013552671543623494, + "acc_norm": 0.3575085324232082, + "acc_norm_stderr": 0.014005494275916573 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37064329814777935, + "acc_stderr": 0.004819899945342492, + "acc_norm": 0.4643497311292571, + "acc_norm_stderr": 0.004977081808179427 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.04721188506097173, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.04721188506097173 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41507024265644954, + "acc_stderr": 0.01762013700365527, + "acc_norm": 0.41507024265644954, + "acc_norm_stderr": 0.01762013700365527 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745643, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745643 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824664, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824664 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36012861736334406, + "acc_stderr": 0.027264297599804012, + "acc_norm": 
0.36012861736334406, + "acc_norm_stderr": 0.027264297599804012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.033832012232444426, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.033832012232444426 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138622, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138622 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121633, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121633 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3128205128205128, + "acc_stderr": 0.02350757902064535, + "acc_norm": 0.3128205128205128, + "acc_norm_stderr": 0.02350757902064535 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.033661244890514495, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.033661244890514495 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.02606936229533513, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02606936229533513 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.031804252043840985, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.031804252043840985 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30566037735849055, + "acc_stderr": 0.028353298073322663, + "acc_norm": 0.30566037735849055, + "acc_norm_stderr": 0.028353298073322663 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425464, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425464 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 
0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.03414014007044036, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.03414014007044036 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02391998416404774, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02391998416404774 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.025906632631016127, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.025906632631016127 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3765432098765432, + "acc_stderr": 0.02695934451874778, + "acc_norm": 0.3765432098765432, + "acc_norm_stderr": 0.02695934451874778 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089116, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089116 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3321100917431193, + "acc_stderr": 0.020192682985423337, + "acc_norm": 0.3321100917431193, + "acc_norm_stderr": 0.020192682985423337 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.034550710191021475, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.034550710191021475 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.027121956071388852, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.027121956071388852 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319771, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319771 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.01869085027359529, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.01869085027359529 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150381, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150381 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364548, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364548 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.026799562024887674, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.026799562024887674 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27346938775510204, + "acc_stderr": 0.028535560337128438, + "acc_norm": 0.27346938775510204, + "acc_norm_stderr": 0.028535560337128438 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.34177215189873417, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.34177215189873417, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26792698826597133, + "acc_stderr": 0.011311347690633885, + "acc_norm": 0.26792698826597133, + "acc_norm_stderr": 0.011311347690633885 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.03182231867647553, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.03182231867647553 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391244, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391244 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875835, + "mc2": 0.4450037389871468, + "mc2_stderr": 0.01574377596952645 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2632821723730815, + "acc_stderr": 0.015141752199573208, + "acc_norm": 0.3624557260920897, + "acc_norm_stderr": 0.016527131240453716 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 
1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/Mistral-7B-v0.1-orca-1k", + "model_sha": "3bfedee0d952da852fefa84e70f6373174a1deaf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-orca-2k/result_2023-10-22 06:00:38.json b/mncai/Mistral-7B-v0.1-orca-2k/result_2023-10-22 06:00:38.json new file mode 100644 index 0000000000000000000000000000000000000000..4400fe31d129677847d2962c4c36755bd592bd14 --- /dev/null +++ b/mncai/Mistral-7B-v0.1-orca-2k/result_2023-10-22 06:00:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.35580204778157, + "acc_norm_stderr": 0.013990571137918763 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37293367855008963, + "acc_stderr": 0.004825963768772218, + "acc_norm": 0.4615614419438359, + "acc_norm_stderr": 0.004975014529648631 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.04931801994220414, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.04931801994220414 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4227330779054917, + "acc_stderr": 0.017665180351954062, + "acc_norm": 0.4227330779054917, + "acc_norm_stderr": 0.017665180351954062 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + 
"acc_stderr": 0.03097669299853443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330315, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330315 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009225, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009225 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985905, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985905 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228412, + "acc_norm": 0.3, + 
"acc_norm_stderr": 0.027940457136228412 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165581, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165581 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3959537572254335, + "acc_stderr": 0.02632981334194626, + "acc_norm": 0.3959537572254335, + "acc_norm_stderr": 0.02632981334194626 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02657148348071997, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02657148348071997 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579859, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579859 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.038607315993160904, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.038607315993160904 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724553, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281274, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281274 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220513, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220513 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.03119223072679566, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.03119223072679566 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5021097046413502, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.5021097046413502, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29139504563233376, + "acc_stderr": 0.011605720214257615, + "acc_norm": 0.29139504563233376, + "acc_norm_stderr": 0.011605720214257615 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559693, + "mc2": 0.49057702125408326, + "mc2_stderr": 0.015830176414087203 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346456, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.01663791778979874 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/Mistral-7B-v0.1-orca-2k", + "model_sha": "60e3c4f7563c49c1c03e6e7ea873148ebc240b8d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-orca_platy-1k/result_2023-10-22 05:06:43.json b/mncai/Mistral-7B-v0.1-orca_platy-1k/result_2023-10-22 05:06:43.json new file mode 100644 index 0000000000000000000000000000000000000000..dd05815ebbbc868c2385d5fc4910de56a3e68214 --- /dev/null +++ b/mncai/Mistral-7B-v0.1-orca_platy-1k/result_2023-10-22 05:06:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2977815699658703, + "acc_stderr": 0.013363080107244487, + "acc_norm": 0.3293515358361775, + "acc_norm_stderr": 0.013734057652635474 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36168094005178253, + "acc_stderr": 0.004795051037917727, + "acc_norm": 0.4523999203345947, + "acc_norm_stderr": 0.004967118575905286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.03805797505590459, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.03805797505590459 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.04689765937278135, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.04689765937278135 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3537675606641124, + "acc_stderr": 
0.017098184708161906, + "acc_norm": 0.3537675606641124, + "acc_norm_stderr": 0.017098184708161906 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.03455473702325438, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03455473702325438 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628834, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628834 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.037400593820293204, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.037400593820293204 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2090032154340836, + "acc_stderr": 0.02309314039837422, + "acc_norm": 0.2090032154340836, + "acc_norm_stderr": 0.02309314039837422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.031602951437766785, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.031602951437766785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309993, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309993 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3025210084033613, + "acc_stderr": 0.029837962388291926, + "acc_norm": 0.3025210084033613, + "acc_norm_stderr": 0.029837962388291926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.02345467488940429, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.02345467488940429 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617722, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617722 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885193, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.03265903381186194, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.03265903381186194 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118355, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118355 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910508, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.025348097468097845, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.025348097468097845 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4129353233830846, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.4129353233830846, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415436, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415436 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686934, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686934 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615625, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615625 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765134, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765134 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.03292296639155142, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.03292296639155142 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281337, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281337 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27155963302752295, + "acc_stderr": 0.019069098363191445, + "acc_norm": 0.27155963302752295, + "acc_norm_stderr": 0.019069098363191445 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.027363593284684944, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.027363593284684944 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.04537935177947879, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.04537935177947879 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.01818521895431808, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.01818521895431808 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19907407407407407, + "acc_stderr": 0.027232298462690253, + "acc_norm": 0.19907407407407407, + "acc_norm_stderr": 0.027232298462690253 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.026431329870789562, + "acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.026431329870789562 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.031052391937584353, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.031052391937584353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771314, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771314 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511782, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511782 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713616, + "mc2": 0.4583811632060745, + "mc2_stderr": 0.015833164608294075 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3234946871310508, + "acc_stderr": 0.016083627290483675, + "acc_norm": 0.3825265643447462, + "acc_norm_stderr": 0.016709165387228813 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/Mistral-7B-v0.1-orca_platy-1k", + "model_sha": "37f66cf60e2e2b4e299419202fe22a45b8d96874", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-platy-1k/result_2023-10-22 04:56:50.json b/mncai/Mistral-7B-v0.1-platy-1k/result_2023-10-22 04:56:50.json new file mode 100644 index 0000000000000000000000000000000000000000..1da369d78b2f598b977b9c7da145ca043ba7f6eb --- /dev/null +++ b/mncai/Mistral-7B-v0.1-platy-1k/result_2023-10-22 04:56:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30119453924914674, + "acc_stderr": 0.01340674176784762, + "acc_norm": 0.34044368600682595, + "acc_norm_stderr": 0.013847460518892976 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3564031069508066, + "acc_stderr": 0.004779574402771374, + "acc_norm": 0.45269866560446126, + "acc_norm_stderr": 0.004967402792744855 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + 
"acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + 
"acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2711864406779661, + "acc_stderr": 0.011354581451622985, + "acc_norm": 0.2711864406779661, + "acc_norm_stderr": 0.011354581451622985 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.2607099143206854, + "mc1_stderr": 0.015368841620766379, + "mc2": 0.44338943697081723, + "mc2_stderr": 0.015766267984553387 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30932703659976385, + "acc_stderr": 0.01589132050552089, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.017072525875563103 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/Mistral-7B-v0.1-platy-1k", + "model_sha": "ae6790c706091e0a0ffada183edf6f08e06ba235", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/Pr_Llama2_7B-Sh5K_Wi5K_Ne5K_Ct5K-Lr05_Ep4/result_2023-11-23 18:42:19.json b/mncai/Pr_Llama2_7B-Sh5K_Wi5K_Ne5K_Ct5K-Lr05_Ep4/result_2023-11-23 18:42:19.json new file mode 100644 index 
0000000000000000000000000000000000000000..57388a3d7cb3168ae1b2bce1ee7dc695cbc61cad --- /dev/null +++ b/mncai/Pr_Llama2_7B-Sh5K_Wi5K_Ne5K_Ct5K-Lr05_Ep4/result_2023-11-23 18:42:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28924914675767915, + "acc_stderr": 0.013250012579393443, + "acc_norm": 0.32337883959044367, + "acc_norm_stderr": 0.013669421630012132 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3481378211511651, + "acc_stderr": 0.004754063867700179, + "acc_norm": 0.4107747460665206, + "acc_norm_stderr": 0.0049096898763420415 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.03786720706234215, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.03786720706234215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38058748403575987, + "acc_stderr": 0.017362564126075418, + "acc_norm": 0.38058748403575987, + "acc_norm_stderr": 0.017362564126075418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.03047297336338005, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.03047297336338005 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928276, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03481285338232964, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03481285338232964 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.03524068951567447, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.03524068951567447 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896614, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896614 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0317852971064275, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0317852971064275 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3903225806451613, + "acc_stderr": 0.027751256636969573, + "acc_norm": 0.3903225806451613, + "acc_norm_stderr": 0.027751256636969573 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.03265903381186194, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.03265903381186194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.0294451753281996, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.0294451753281996 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844082, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844082 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43283582089552236, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.43283582089552236, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.034140140070440354, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.034140140070440354 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02351729433596329, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02351729433596329 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36127167630057805, + "acc_stderr": 0.025862201852277895, + "acc_norm": 0.36127167630057805, + "acc_norm_stderr": 0.025862201852277895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.026725868809100786, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.026725868809100786 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + 
"acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.03499807276193338, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.03499807276193338 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3119266055045872, + "acc_stderr": 0.019862967976707245, + "acc_norm": 0.3119266055045872, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.02742047766262923, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.02742047766262923 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.045454545454545484, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.045454545454545484 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.018550634502952964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.018550634502952964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114023, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114023 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510944, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510944 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.0296246635811597, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.0296246635811597 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.030472526026726496, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.030472526026726496 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3924050632911392, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.3924050632911392, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26988265971316816, + "acc_stderr": 
0.011337381084250425, + "acc_norm": 0.26988265971316816, + "acc_norm_stderr": 0.011337381084250425 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.036639749943912434, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.036639749943912434 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.01598359510181139, + "mc2": 0.4563469870156814, + "mc2_stderr": 0.01627127621105625 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2762691853600944, + "acc_stderr": 0.015373387500464464, + "acc_norm": 0.3270365997638725, + "acc_norm_stderr": 0.016129047485457036 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + 
"config_general": { + "model_name": "mncai/Pr_Llama2_7B-Sh5K_Wi5K_Ne5K_Ct5K-Lr05_Ep4", + "model_sha": "500045ae7ecdb4ce4913d3d5effad48fa02433b5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/agiin-13.6B-v0.1/result_2023-12-20 10:58:15.json b/mncai/agiin-13.6B-v0.1/result_2023-12-20 10:58:15.json new file mode 100644 index 0000000000000000000000000000000000000000..2568d924f7413b17fe080263e9b347a19809ae87 --- /dev/null +++ b/mncai/agiin-13.6B-v0.1/result_2023-12-20 10:58:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33532423208191126, + "acc_stderr": 0.013796182947785562, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.01429122839353659 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3580959968133838, + "acc_stderr": 0.004784607222774629, + "acc_norm": 0.4439354710217088, + "acc_norm_stderr": 0.004958314114266491 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.028099240775809563, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.028099240775809563 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459157, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + 
"acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296542, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296542 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036546, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036546 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.02812911270916589, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.02812911270916589 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.02905858830374884, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.02905858830374884 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.03487558640462064, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.03487558640462064 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41798941798941797, + "acc_stderr": 0.025402555503260912, + "acc_norm": 0.41798941798941797, + "acc_norm_stderr": 0.025402555503260912 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239004 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.021436420955529414, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.021436420955529414 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852387, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852387 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34477124183006536, + "acc_stderr": 0.019228322018696647, + "acc_norm": 0.34477124183006536, + "acc_norm_stderr": 0.019228322018696647 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.0289473388516141, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.0289473388516141 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833587, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.0338517797604481, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.0338517797604481 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859926, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859926 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.0317229500433233, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.0317229500433233 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.012117939998705862, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.012117939998705862 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3659730722154223, + "mc1_stderr": 0.01686294168408836, + "mc2": 0.54650809718614, + "mc2_stderr": 0.016523179064123664 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39433293978748524, + "acc_stderr": 0.0168020906748932, + "acc_norm": 0.41912632821723733, + "acc_norm_stderr": 0.01696399501086279 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/agiin-13.6B-v0.1", + "model_sha": "6c93ca1d60b09b9b91e15c57dc8525827d371798", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/llama2-13b-dpo-v2/result_2023-12-03 07:07:36.json b/mncai/llama2-13b-dpo-v2/result_2023-12-03 07:07:36.json new file mode 100644 index 0000000000000000000000000000000000000000..069b0e96b8547b5bed2150984edd4e718bbe43dc --- /dev/null +++ b/mncai/llama2-13b-dpo-v2/result_2023-12-03 07:07:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497724, + "acc_norm": 0.40187713310580203, + "acc_norm_stderr": 0.014327268614578274 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3693487353116909, + "acc_stderr": 0.004816421208654088, + "acc_norm": 0.46883091017725553, + "acc_norm_stderr": 0.004980076707392432 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.01783252407959326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085335, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085335 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.032928028193303135, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.032928028193303135 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461227, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.028071588901091852, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.028071588901091852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507384, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507384 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.02375292871211213, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.02375292871211213 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { 
+ "acc": 0.39, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.39, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.037124548537213684, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.037124548537213684 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.019206606848825365, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.019206606848825365 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281288, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281288 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298825, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298825 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761974, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761974 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + 
"acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163908, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163908 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.333116036505867, + "acc_stderr": 0.012037930451512047, + "acc_norm": 0.333116036505867, + "acc_norm_stderr": 0.012037930451512047 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3708690330477356, + "mc1_stderr": 0.016909693580248804, + "mc2": 0.5432148185655791, + "mc2_stderr": 0.01594594261862486 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3837072018890201, + "acc_stderr": 0.016718924637231826, + "acc_norm": 0.41204250295159384, + "acc_norm_stderr": 0.01692227673852836 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/llama2-13b-dpo-v2", + "model_sha": "c164263281b29c2cca9929351e472484db01d78d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/llama2-13b-dpo-v3/result_2023-12-03 13:38:55.json b/mncai/llama2-13b-dpo-v3/result_2023-12-03 13:38:55.json new file mode 100644 index 0000000000000000000000000000000000000000..217e36d6c720c17b37b00e84e9ded5097faac174 --- /dev/null +++ b/mncai/llama2-13b-dpo-v3/result_2023-12-03 13:38:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4334470989761092, + "acc_stderr": 0.0144813762245589, + "acc_norm": 0.49146757679180886, + "acc_norm_stderr": 0.014609263165632186 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4462258514240191, + "acc_stderr": 0.004960839986099525, + "acc_norm": 0.5910177255526787, + "acc_norm_stderr": 0.0049064119844767886 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.037712831076265434, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.037712831076265434 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865633, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865633 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079019, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079019 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 
0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638627, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638627 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.034524539038220406, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.034524539038220406 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.027869320571664635, + "acc_norm": 0.4, + "acc_norm_stderr": 0.027869320571664635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009794, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009794 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230175, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230175 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.035123109641239346, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.035123109641239346 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112136, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112136 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.026788811931562764, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.026788811931562764 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323674, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323674 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5706422018348624, + "acc_stderr": 0.021222286397236504, + "acc_norm": 0.5706422018348624, + "acc_norm_stderr": 0.021222286397236504 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.019910377463105932, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.019910377463105932 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915185, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915185 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560524, 
+ "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560524 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335317, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789838, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789838 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6835443037974683, + "acc_stderr": 0.03027497488021897, + "acc_norm": 0.6835443037974683, + "acc_norm_stderr": 0.03027497488021897 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36571056062581486, + "acc_stderr": 0.012301028188840568, + "acc_norm": 0.36571056062581486, + "acc_norm_stderr": 0.012301028188840568 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070265, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.37454100367197063, + "mc1_stderr": 0.016943535128405345, + "mc2": 0.5355134469215413, + "mc2_stderr": 0.01601794771276579 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4462809917355372, + "acc_stderr": 0.017090852631668332, + "acc_norm": 0.500590318772137, + "acc_norm_stderr": 0.01719034212344859 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/llama2-13b-dpo-v3", + "model_sha": "6dbdd5c708e7b7cc22552e620ad976f24d08beac", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/llama2-13b-dpo-v4/result_2023-12-05 02:27:45.json b/mncai/llama2-13b-dpo-v4/result_2023-12-05 02:27:45.json new file mode 100644 index 0000000000000000000000000000000000000000..63563b8b9e6bc3d82585d83eb66f3902b5709013 --- /dev/null +++ b/mncai/llama2-13b-dpo-v4/result_2023-12-05 02:27:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4189419795221843, + "acc_stderr": 0.014418106953639013, + "acc_norm": 0.48208191126279865, + "acc_norm_stderr": 0.014602005585490983 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4475204142601075, + "acc_stderr": 0.0049622205125483595, + "acc_norm": 0.6004779924317865, + "acc_norm_stderr": 0.004887991225950279 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.0177478742456836, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.0177478742456836 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 
0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840622, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561953, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561953 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650776, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650776 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 
0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523867, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523867 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.02677299065336182, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.02677299065336182 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.581651376146789, + "acc_stderr": 0.021149548596443878, + "acc_norm": 0.581651376146789, + "acc_norm_stderr": 0.021149548596443878 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685741, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685741 + 
}, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.01446589382985994, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.01446589382985994 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763127, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763127 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.03038193194999041, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.03038193194999041 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741518, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.016451264440068242, + "mc2": 0.50360676511002, + "mc2_stderr": 0.015375083858045636 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49586776859504134, + "acc_stderr": 0.01718976703213082, + "acc_norm": 0.5596221959858324, + "acc_norm_stderr": 0.01706769977431297 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/llama2-13b-dpo-v4", + "model_sha": "4be900561f9dc8c16a2f26f5ebfa6c31ac35fd3e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/llama2-13b-dpo-v6/result_2023-12-11 14:18:02.json b/mncai/llama2-13b-dpo-v6/result_2023-12-11 14:18:02.json new file mode 100644 index 0000000000000000000000000000000000000000..d19de8d4a912d86454427e99ae2aa9fcf5ad3930 --- /dev/null +++ b/mncai/llama2-13b-dpo-v6/result_2023-12-11 14:18:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4308873720136519, + "acc_stderr": 0.014471133392642463, + "acc_norm": 0.4854948805460751, + "acc_norm_stderr": 0.014605241081370056 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43835889265086636, + "acc_stderr": 0.0049517176220079786, + "acc_norm": 0.5828520215096594, + "acc_norm_stderr": 0.004920800313232744 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.01778403453499242, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.01778403453499242 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { 
+ "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617749, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617749 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986476, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986476 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.0303650508291152, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.0303650508291152 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + 
"acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275794, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275794 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206184, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206184 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.021136376504030874, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.021136376504030874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.027996723180631466, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.027996723180631466 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.01997742260022747, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.01997742260022747 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281285, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281285 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776122, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776122 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35984354628422427, + "acc_stderr": 0.0122582604836898, + "acc_norm": 0.35984354628422427, + "acc_norm_stderr": 0.0122582604836898 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33414932680538556, + "mc1_stderr": 0.016512530677150524, + "mc2": 0.49948963054555756, + "mc2_stderr": 0.01566865586505939 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.017052633559856065 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/llama2-13b-dpo-v6", + "model_sha": "56f7d5a1b8f6ce6587cc10ff4ba335755bc062f8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/llama2-13b-dpo-v7/result_2023-12-13 01:47:38.json b/mncai/llama2-13b-dpo-v7/result_2023-12-13 01:47:38.json new file mode 100644 index 0000000000000000000000000000000000000000..23beca144c4ea1eb6cad61fa0397945cb2578d7c --- /dev/null +++ b/mncai/llama2-13b-dpo-v7/result_2023-12-13 01:47:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4453924914675768, + "acc_stderr": 0.014523987638344086, + "acc_norm": 0.49658703071672355, + "acc_norm_stderr": 0.014611050403244081 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4458275243975304, + "acc_stderr": 0.004960408362133238, + "acc_norm": 0.5933081059549891, + "acc_norm_stderr": 0.0049021253880022035 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 
0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.01777922523339422, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.01777922523339422 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106522, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723456, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723456 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.023973861998992062, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.023973861998992062 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456608, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456608 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833925, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833925 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 
0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.01997742260022747, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.01997742260022747 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312548, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312548 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301847, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3748370273794003, + "acc_stderr": 0.012363652467551924, + "acc_norm": 0.3748370273794003, + "acc_norm_stderr": 0.012363652467551924 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3488372093023256, + "mc1_stderr": 0.01668441985998688, + "mc2": 0.5096219872166472, + "mc2_stderr": 0.015864379577843746 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.017187658199336733, + "acc_norm": 0.5478158205430933, + "acc_norm_stderr": 0.017111567130916792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/llama2-13b-dpo-v7", + "model_sha": "bc6e1316dbe8f6530eee9850f42b63c6a38fe379", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/llama2-7b-dpo-v1/result_2023-12-02 10:33:57.json b/mncai/llama2-7b-dpo-v1/result_2023-12-02 10:33:57.json new file mode 100644 index 0000000000000000000000000000000000000000..6f038978dcf296977d73b6c7eb3612d28dc8a20d --- /dev/null +++ b/mncai/llama2-7b-dpo-v1/result_2023-12-02 10:33:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2883959044368601, + "acc_stderr": 0.01323839442242817, + "acc_norm": 0.3267918088737201, + "acc_norm_stderr": 0.013706665975587331 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34843656642103166, + "acc_stderr": 0.004755013243022123, + "acc_norm": 
0.4192391953794065, + "acc_norm_stderr": 0.004924261467934419 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38058748403575987, + "acc_stderr": 0.017362564126075418, + "acc_norm": 0.38058748403575987, + "acc_norm_stderr": 0.017362564126075418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.027316847674192707, + "acc_norm": 0.3633440514469453, + "acc_norm_stderr": 0.027316847674192707 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.034812853382329624, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.034812853382329624 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.039417076320648906, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.039417076320648906 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237653, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.02498535492310233, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.02498535492310233 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4129032258064516, + "acc_stderr": 0.028009138125400387, + "acc_norm": 0.4129032258064516, + "acc_norm_stderr": 0.028009138125400387 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.5598290598290598, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342582, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342582 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5024875621890548, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.5024875621890548, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240016, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624555, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624555 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.02642481659400985, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.02642481659400985 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.027044538138402612, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.027044538138402612 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3889908256880734, + "acc_stderr": 0.020902300887392863, + "acc_norm": 0.3889908256880734, + "acc_norm_stderr": 0.020902300887392863 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871137, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871137 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.028074158947600653, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.028074158947600653 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.018433427649401906, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.018433427649401906 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578728, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578728 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.01502408388332287, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.01502408388332287 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398865, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398865 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175364, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.030587326294702344, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.030587326294702344 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25749674054758803, + "acc_stderr": 0.01116770601490415, + "acc_norm": 0.25749674054758803, + "acc_norm_stderr": 0.01116770601490415 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.016272287957916944, + "mc2": 0.4875420781374341, + "mc2_stderr": 0.015912120755521442 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2798110979929162, + "acc_stderr": 0.01543371579542777, + "acc_norm": 0.33293978748524206, + "acc_norm_stderr": 0.01620243120837379 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/llama2-7b-dpo-v1", + "model_sha": "01245798d4fa8d885b632cfeb5b1a369b73a8003", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/mistral-7b-ko-1871-2p1/result_2023-10-06 10:27:15.json b/mncai/mistral-7b-ko-1871-2p1/result_2023-10-06 10:27:15.json new file mode 100644 index 
0000000000000000000000000000000000000000..b6cc01a81676fa6e1e0f7909c445a1c84efcdc79 --- /dev/null +++ b/mncai/mistral-7b-ko-1871-2p1/result_2023-10-06 10:27:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3250853242320819, + "acc_stderr": 0.013688147309729124, + "acc_norm": 0.3609215017064846, + "acc_norm_stderr": 0.01403476138617546 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3724357697669787, + "acc_stderr": 0.00482465540607556, + "acc_norm": 0.4759012148974308, + "acc_norm_stderr": 0.0049839823961873655 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.017852981266633955, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.017852981266633955 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.032469569197899575, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.032469569197899575 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755292, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755292 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.04161808503501528, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.04161808503501528 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240627, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240627 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507755, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507755 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752042, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752042 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + 
"acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579859, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579859 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.02141099975363592, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.02141099975363592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.02855582751652879, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.02855582751652879 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.01965992249362335, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.01965992249362335 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963768, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963768 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653063, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653063 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882611, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882611 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 
0.011849234291459324, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.011849234291459324 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31701346389228885, + "mc1_stderr": 0.016289203374403396, + "mc2": 0.4891689873387216, + "mc2_stderr": 0.015571905877884106 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4155844155844156, + "acc_stderr": 0.016943586313076568, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + 
"config_general": { + "model_name": "mncai/mistral-7b-ko-1871-2p1", + "model_sha": "1ab1ccefadb9c3e832b4d2018cf0220974f998b3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/mistral-7b-v5/result_2023-12-11 09:32:53.json b/mncai/mistral-7b-v5/result_2023-12-11 09:32:53.json new file mode 100644 index 0000000000000000000000000000000000000000..26f73fc187f94173f59c30f8b6589c99a3241dac --- /dev/null +++ b/mncai/mistral-7b-v5/result_2023-12-11 09:32:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4402730375426621, + "acc_stderr": 0.014506769524804244, + "acc_norm": 0.47952218430034127, + "acc_norm_stderr": 0.01459913135303501 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40977892850029873, + "acc_stderr": 0.004907877144720013, + "acc_norm": 0.5423222465644294, + "acc_norm_stderr": 0.004971874159777691 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5708812260536399, + "acc_stderr": 0.017699388483126785, + "acc_norm": 0.5708812260536399, + "acc_norm_stderr": 0.017699388483126785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + 
"acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49743589743589745, + "acc_stderr": 0.025350672979412205, + "acc_norm": 0.49743589743589745, + "acc_norm_stderr": 0.025350672979412205 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004257, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004257 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808086, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808086 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.037242495958177295, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404904, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.025279850397404904 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.02780749004427619, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.02780749004427619 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747664, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747664 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041017, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041017 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249032, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249032 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19888268156424582, + "acc_stderr": 0.013349892983092517, + "acc_norm": 0.19888268156424582, + "acc_norm_stderr": 0.013349892983092517 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + 
"acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33702737940026073, + "acc_stderr": 0.012072836273691327, + "acc_norm": 0.33702737940026073, + "acc_norm_stderr": 0.012072836273691327 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3182374541003672, + "mc1_stderr": 0.016305988648920598, + "mc2": 0.477954610435675, + "mc2_stderr": 0.015463026163904131 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.5194805194805194, + "acc_norm_stderr": 0.017177301992342547 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/mistral-7b-v5", + "model_sha": "4cd578d40e01a31f3de057ac14d6dc999ffb77d6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:03:30.json b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:03:30.json new file mode 100644 index 0000000000000000000000000000000000000000..ba96b12f23e0a6e648446faee639899902dcb9a0 --- /dev/null +++ b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:03:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29948805460750855, + "acc_stderr": 0.01338502163731356, + "acc_norm": 0.35494880546075086, + "acc_norm_stderr": 0.013983036904094094 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38627763393746267, + "acc_stderr": 0.004859004184694615, + "acc_norm": 0.4993029277036447, + "acc_norm_stderr": 0.00498977656227611 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038245, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.034678266857038245 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.280970625798212, + "acc_stderr": 0.016073127851221225, + "acc_norm": 0.280970625798212, + "acc_norm_stderr": 0.016073127851221225 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.02655698211783875, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.02655698211783875 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789413, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789413 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140474, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.026160584450140474 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.02715715047956382, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.02715715047956382 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533084, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533084 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378949, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378949 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.026841514322958924, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.026841514322958924 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2153846153846154, + "acc_stderr": 0.020843034557462878, + "acc_norm": 0.2153846153846154, + "acc_norm_stderr": 0.020843034557462878 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.02264421261552521, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.02264421261552521 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 
0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3092485549132948, + "acc_stderr": 0.02488314057007176, + "acc_norm": 0.3092485549132948, + "acc_norm_stderr": 0.02488314057007176 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868038, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.01792308766780305, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.01792308766780305 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.03129843185743809, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.03129843185743809 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.038035102483515854 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093936, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093936 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.030274974880218977, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.030274974880218977 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.011328734403140327, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.011328734403140327 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.03149328104507956, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.03149328104507956 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752337, + "mc2": 0.3942593710384486, + "mc2_stderr": 0.014811018314989769 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32113341204250295, + "acc_stderr": 0.01605276257911158, + "acc_norm": 0.42502951593860683, + "acc_norm_stderr": 0.016996016308362883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge", + "model_sha": "793d22f37f5945b22fbc33c447f8cdcaa4a50221", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:08:21.json b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:08:21.json new file mode 100644 index 0000000000000000000000000000000000000000..ba96b12f23e0a6e648446faee639899902dcb9a0 --- /dev/null +++ b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:08:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29948805460750855, + "acc_stderr": 0.01338502163731356, + "acc_norm": 0.35494880546075086, + "acc_norm_stderr": 0.013983036904094094 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38627763393746267, + "acc_stderr": 0.004859004184694615, + "acc_norm": 0.4993029277036447, + "acc_norm_stderr": 0.00498977656227611 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038245, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.034678266857038245 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.280970625798212, + "acc_stderr": 0.016073127851221225, + "acc_norm": 0.280970625798212, + "acc_norm_stderr": 0.016073127851221225 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.02655698211783875, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.02655698211783875 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789413, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789413 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140474, + "acc_norm": 
0.3054662379421222, + "acc_norm_stderr": 0.026160584450140474 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.02715715047956382, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.02715715047956382 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533084, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533084 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378949, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378949 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.026841514322958924, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.026841514322958924 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2153846153846154, + "acc_stderr": 0.020843034557462878, + "acc_norm": 0.2153846153846154, + "acc_norm_stderr": 0.020843034557462878 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 
0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.02264421261552521, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.02264421261552521 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3092485549132948, + "acc_stderr": 0.02488314057007176, + "acc_norm": 0.3092485549132948, + "acc_norm_stderr": 0.02488314057007176 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868038, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.01792308766780305, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.01792308766780305 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.03129843185743809, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.03129843185743809 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.038035102483515854 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 
0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093936, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093936 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.030274974880218977, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.030274974880218977 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.011328734403140327, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.011328734403140327 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.03149328104507956, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.03149328104507956 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752337, + "mc2": 0.3942593710384486, + "mc2_stderr": 0.014811018314989769 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32113341204250295, + "acc_stderr": 0.01605276257911158, + "acc_norm": 0.42502951593860683, + "acc_norm_stderr": 0.016996016308362883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, 
+ "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge", + "model_sha": "793d22f37f5945b22fbc33c447f8cdcaa4a50221", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:39.json b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:39.json new file mode 100644 index 0000000000000000000000000000000000000000..8e19679f279a51edb8db2167bcd20f9832dd6224 --- /dev/null +++ b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30119453924914674, + "acc_stderr": 0.013406741767847612, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785562 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38707428799044014, + "acc_stderr": 0.004860854240821967, + "acc_norm": 0.5005974905397331, + "acc_norm_stderr": 0.004989777848791005 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3001277139208174, + "acc_stderr": 0.016389249691317425, + "acc_norm": 0.3001277139208174, + "acc_norm_stderr": 0.016389249691317425 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 
0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.028957342788342343, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.028957342788342343 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.0332939411907353, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.0332939411907353 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3536977491961415, + "acc_stderr": 0.027155208103200868, + "acc_norm": 0.3536977491961415, + "acc_norm_stderr": 0.027155208103200868 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728743, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728743 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.032894773300986155, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.032894773300986155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.03921545312467122, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.03921545312467122 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136112, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136112 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28974358974358977, + "acc_stderr": 0.02300062824368797, + "acc_norm": 0.28974358974358977, + "acc_norm_stderr": 0.02300062824368797 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946336, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946336 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.02489246917246283, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.02489246917246283 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.03057281131029961, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.03057281131029961 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724046, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724046 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878285, + "acc_norm": 
0.24545454545454545, + "acc_norm_stderr": 0.04122066502878285 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199966, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.03187187537919798, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919798 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594316, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776568, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776568 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.02572280220089582, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.02572280220089582 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924055, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924055 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868045, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26788990825688075, + "acc_stderr": 0.01898746225797865, + "acc_norm": 0.26788990825688075, + "acc_norm_stderr": 0.01898746225797865 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.02718449890994162, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.02718449890994162 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 
0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.030643607071677098, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.030643607071677098 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29248366013071897, + "acc_stderr": 0.01840341571010979, + "acc_norm": 0.29248366013071897, + "acc_norm_stderr": 0.01840341571010979 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260664, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260664 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146292, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146292 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + "acc_stderr": 0.03029950656215418, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 0.03029950656215418 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.379746835443038, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.379746835443038, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.01190189563578609, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.01190189563578609 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.03354092437591518, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.03354092437591518 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715, + "mc2": 0.39860268740922694, + "mc2_stderr": 0.015473079108834439 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29279811097992914, + "acc_stderr": 0.015644823205401334, + "acc_norm": 0.33412042502951594, + "acc_norm_stderr": 0.016216763304239695 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3", + "model_sha": "33bfc3a65f355b210a21b6f7c8f04f49492835bf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:47.json b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:47.json new file mode 100644 index 0000000000000000000000000000000000000000..8e19679f279a51edb8db2167bcd20f9832dd6224 --- /dev/null +++ b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30119453924914674, + "acc_stderr": 0.013406741767847612, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785562 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38707428799044014, + "acc_stderr": 0.004860854240821967, + "acc_norm": 0.5005974905397331, + "acc_norm_stderr": 0.004989777848791005 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3001277139208174, + "acc_stderr": 0.016389249691317425, + "acc_norm": 0.3001277139208174, + "acc_norm_stderr": 0.016389249691317425 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.028957342788342343, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.028957342788342343 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.0332939411907353, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.0332939411907353 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3536977491961415, + "acc_stderr": 0.027155208103200868, + "acc_norm": 0.3536977491961415, + "acc_norm_stderr": 0.027155208103200868 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728743, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728743 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.032894773300986155, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.032894773300986155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.03921545312467122, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.03921545312467122 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136112, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136112 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28974358974358977, + "acc_stderr": 0.02300062824368797, + "acc_norm": 0.28974358974358977, + "acc_norm_stderr": 0.02300062824368797 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946336, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946336 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.02489246917246283, 
+ "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.02489246917246283 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.03057281131029961, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.03057281131029961 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724046, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724046 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878285, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878285 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199966, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.03187187537919798, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919798 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594316, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776568, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776568 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.02572280220089582, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.02572280220089582 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924055, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924055 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868045, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26788990825688075, + "acc_stderr": 0.01898746225797865, + "acc_norm": 0.26788990825688075, + "acc_norm_stderr": 0.01898746225797865 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 
0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.02718449890994162, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.02718449890994162 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.030643607071677098, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.030643607071677098 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29248366013071897, + "acc_stderr": 0.01840341571010979, + "acc_norm": 0.29248366013071897, + "acc_norm_stderr": 0.01840341571010979 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260664, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260664 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146292, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146292 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + "acc_stderr": 0.03029950656215418, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 0.03029950656215418 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.379746835443038, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.379746835443038, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.01190189563578609, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.01190189563578609 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.03354092437591518, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.03354092437591518 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715, + "mc2": 0.39860268740922694, + "mc2_stderr": 0.015473079108834439 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29279811097992914, + "acc_stderr": 0.015644823205401334, + "acc_norm": 
0.33412042502951594, + "acc_norm_stderr": 0.016216763304239695 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3", + "model_sha": "33bfc3a65f355b210a21b6f7c8f04f49492835bf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge-v2/result_2023-10-10 00:11:38.json b/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge-v2/result_2023-10-10 00:11:38.json new file mode 100644 index 0000000000000000000000000000000000000000..8a1e9b00fa1d48fc0d68415b2cd174421b336a82 --- /dev/null +++ b/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge-v2/result_2023-10-10 00:11:38.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.013640943091946524, + "acc_norm": 0.37457337883959047, + "acc_norm_stderr": 0.014144193471893446 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3913563035251942, + "acc_stderr": 0.004870563921220623, + "acc_norm": 0.5044811790479984, + "acc_norm_stderr": 0.004989581008163209 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.033773102522091925, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.033773102522091925 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.046202840822800406, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.046202840822800406 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3231162196679438, + "acc_stderr": 0.016723726512343048, + "acc_norm": 0.3231162196679438, + "acc_norm_stderr": 0.016723726512343048 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.028504856470514178, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.028504856470514178 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.031417842916639245, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.031417842916639245 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488547, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488547 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21076233183856502, + "acc_stderr": 0.027373095500540193, + "acc_norm": 0.21076233183856502, + "acc_norm_stderr": 0.027373095500540193 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462202, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.04010358942462202 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.034812853382329624, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.034812853382329624 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.02971914287634284, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.02971914287634284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35128205128205126, + "acc_stderr": 0.024203665177902796, + "acc_norm": 0.35128205128205126, + "acc_norm_stderr": 0.024203665177902796 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 
0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970187, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970187 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.02652270967466776, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.02652270967466776 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.33760683760683763, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.33760683760683763, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.29056603773584905, + "acc_stderr": 0.027943219989337142, + "acc_norm": 0.29056603773584905, + "acc_norm_stderr": 0.027943219989337142 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.043091187099464585, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.043091187099464585 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31343283582089554, + "acc_stderr": 0.03280188205348644, + "acc_norm": 0.31343283582089554, + "acc_norm_stderr": 0.03280188205348644 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165582, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165582 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261124, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261124 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3901734104046243, + "acc_stderr": 0.026261677607806642, + "acc_norm": 0.3901734104046243, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.02577311116963044, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.02577311116963044 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 
0.03480175668466036, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.03480175668466036 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3431192660550459, + "acc_stderr": 0.020354777736086037, + "acc_norm": 0.3431192660550459, + "acc_norm_stderr": 0.020354777736086037 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468008, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468008 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.041733491480834974, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.041733491480834974 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.037150621549989056, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.037150621549989056 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053435, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.018217269552053435 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290403, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005333, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005333 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.01431099954796145, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.01431099954796145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898445, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.028666857790274655, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.028666857790274655 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.42616033755274263, + "acc_stderr": 0.032190357031317736, + "acc_norm": 0.42616033755274263, + "acc_norm_stderr": 0.032190357031317736 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3161668839634941, + "acc_stderr": 0.011875780894386578, + "acc_norm": 0.3161668839634941, + "acc_norm_stderr": 0.011875780894386578 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3480392156862745, + "acc_stderr": 
0.03343311240488418, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.01568092936402464, + "mc2": 0.4401386616406487, + "mc2_stderr": 0.015231170871530949 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30932703659976385, + "acc_stderr": 0.0158913205055209, + "acc_norm": 0.3612750885478158, + "acc_norm_stderr": 0.016515463022412014 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge-v2", + "model_sha": "fe0117824036ebe2d054ddf14b2ef04a1cb19dda", + "model_dtype": "torch.float16", + "lighteval_sha": "", 
+ "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssongit/Koala-12.8b-v1/result_2023-10-24 06:08:21.json b/mssongit/Koala-12.8b-v1/result_2023-10-24 06:08:21.json new file mode 100644 index 0000000000000000000000000000000000000000..f4bef19ba0f66c4ca2dc9dbd2fdf45c6df637f60 --- /dev/null +++ b/mssongit/Koala-12.8b-v1/result_2023-10-24 06:08:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21245733788395904, + "acc_stderr": 0.01195348290658295, + "acc_norm": 0.2431740614334471, + "acc_norm_stderr": 0.012536554144587096 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2568213503286198, + "acc_stderr": 0.00435987151963954, + "acc_norm": 0.27106154152559253, + "acc_norm_stderr": 0.00443599349258387 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21839080459770116, + "acc_stderr": 0.014774358319934486, + "acc_norm": 0.21839080459770116, + "acc_norm_stderr": 0.014774358319934486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977112, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977112 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23829787234042554, + "acc_stderr": 0.027851252973889802, + "acc_norm": 0.23829787234042554, + "acc_norm_stderr": 0.027851252973889802 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.03070982405056527, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.03070982405056527 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.13901345291479822, + "acc_stderr": 0.0232193528344745, + "acc_norm": 0.13901345291479822, + "acc_norm_stderr": 0.0232193528344745 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.03289477330098615, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.03289477330098615 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.037528339580033376, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.037528339580033376 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.02820554503327773, + 
"acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.02820554503327773 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.29743589743589743, + "acc_stderr": 0.023177408131465932, + "acc_norm": 0.29743589743589743, + "acc_norm_stderr": 0.023177408131465932 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.17592592592592593, + "acc_stderr": 0.036809181416738786, + "acc_norm": 0.17592592592592593, + "acc_norm_stderr": 0.036809181416738786 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114468, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671746, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671746 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.036848815213890225, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.036848815213890225 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.03096590312357301, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.034355680560478746, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.034355680560478746 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113935, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113935 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526502, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.02402774515526502 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + 
"acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27522935779816515, + "acc_stderr": 0.019149093743155203, + "acc_norm": 0.27522935779816515, + "acc_norm_stderr": 0.019149093743155203 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.041349130183033156, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.041349130183033156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.02625605383571896, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.02625605383571896 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1652892561983471, + "acc_stderr": 0.03390780612972776, + "acc_norm": 0.1652892561983471, + "acc_norm_stderr": 0.03390780612972776 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.017077373377857, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.017077373377857 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.02484792135806396, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.02484792135806396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364555, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364555 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.027257202606114948, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.027257202606114948 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2979591836734694, + "acc_stderr": 0.02927956741106567, + "acc_norm": 0.2979591836734694, + "acc_norm_stderr": 
0.02927956741106567 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.027479744550808503, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.027479744550808503 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25358539765319427, + "acc_stderr": 0.011111715336101127, + "acc_norm": 0.25358539765319427, + "acc_norm_stderr": 0.011111715336101127 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869326 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511784, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511784 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2178702570379437, + "mc1_stderr": 0.014450846714123911, + "mc2": 0.4610937921300059, + "mc2_stderr": 0.017090763627039533 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.10979929161747344, + "acc_stderr": 0.010748764686721606, + "acc_norm": 0.22904368358913813, + "acc_norm_stderr": 0.01444737227725382 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssongit/Koala-12.8b-v1", + "model_sha": "6e6754abd5a99c7984aa31eff410d3b8ee611ee8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nakhyeon/llama-2-ko-qlora4/result_2023-10-20 05:31:57.json b/nakhyeon/llama-2-ko-qlora4/result_2023-10-20 05:31:57.json new file mode 100644 index 0000000000000000000000000000000000000000..827919db8ac6777d56b56cfabd0f507b44b2fc6e --- /dev/null +++ b/nakhyeon/llama-2-ko-qlora4/result_2023-10-20 05:31:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.0136216961191733, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668525 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3826926906990639, + "acc_stderr": 0.004850508945116094, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852732, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852732 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896628, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, 
+ "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022593, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022593 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961438, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961438 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733095, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37061566370146265, + "mc2_stderr": 0.014735163251703702 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nakhyeon/llama-2-ko-qlora4", + "model_sha": "759cf82ec24f0bd625edfa916f22701d30517591", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nakhyeon/llama-ko-qlora-1024/result_2023-10-21 07:57:16.json b/nakhyeon/llama-ko-qlora-1024/result_2023-10-21 07:57:16.json new file mode 100644 index 0000000000000000000000000000000000000000..3f5f860e021c2fc2e6c43701301225326c82b3d4 --- /dev/null +++ b/nakhyeon/llama-ko-qlora-1024/result_2023-10-21 07:57:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.0136216961191733, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668525 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3826926906990639, + "acc_stderr": 0.004850508945116094, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852732, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852732 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896628, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022593, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022593 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961438, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961438 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733095, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37061566370146265, + "mc2_stderr": 0.014735163251703702 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nakhyeon/llama-ko-qlora-1024", + "model_sha": "10f5e7aa49eb466a26eb3c696b72fff0e003a954", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nakhyeon/polyglot-ko-12b-qlora/result_2023-11-04 05:48:27.json b/nakhyeon/polyglot-ko-12b-qlora/result_2023-11-04 05:48:27.json new file mode 100644 index 0000000000000000000000000000000000000000..d01e216052e8b0c0ed2cc2a55303326bd87a3354 --- /dev/null +++ b/nakhyeon/polyglot-ko-12b-qlora/result_2023-11-04 05:48:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537365, + "acc_norm": 0.33447098976109213, + "acc_norm_stderr": 0.013787460322441374 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3855805616411073, + "acc_stderr": 0.004857374133246887, + "acc_norm": 0.5027882891854212, + "acc_norm_stderr": 0.004989703824167094 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393161, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393161 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.015671006009339572, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.015671006009339572 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039787, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039787 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.02631185807185416, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.02631185807185416 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03053289223393203, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03053289223393203 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.038061426873099935, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.038061426873099935 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868963, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868963 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026928, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026928 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.0309037969521145, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.0309037969521145 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239963, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239963 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.027601921381417604, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.027601921381417604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.026199808807561932, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.026199808807561932 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + 
"acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935554, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935554 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776578, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776578 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.036539469694421, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.036539469694421 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.022698657167855716, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.022698657167855716 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886338, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886338 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26422018348623855, + "acc_stderr": 0.0189041641715102, + "acc_norm": 0.26422018348623855, + "acc_norm_stderr": 0.0189041641715102 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.02505850331695815, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.02505850331695815 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.20098039215686275, + "acc_stderr": 0.016211938889655574, + "acc_norm": 
0.20098039215686275, + "acc_norm_stderr": 0.016211938889655574 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.024847921358063962, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.024847921358063962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.026882144922307748, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.026882144922307748 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.32489451476793246, + "acc_stderr": 0.030486039389105303, + "acc_norm": 0.32489451476793246, + "acc_norm_stderr": 0.030486039389105303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25749674054758803, + "acc_stderr": 0.011167706014904136, + "acc_norm": 0.25749674054758803, + "acc_norm_stderr": 0.011167706014904136 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.032876667586034886, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.032876667586034886 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731613, + "mc2": 0.390673097215474, + "mc2_stderr": 0.014736542111904073 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30932703659976385, + "acc_stderr": 0.01589132050552089, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.0168363772928493 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nakhyeon/polyglot-ko-12b-qlora", + "model_sha": "67243c8d6550f974faf3b6dc3a09ede91e7fda55", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nakhyeonn/llama-2-ko-qlora-prompt/result_2023-10-23 21:41:32.json b/nakhyeonn/llama-2-ko-qlora-prompt/result_2023-10-23 21:41:32.json new file mode 100644 index 0000000000000000000000000000000000000000..76646c486b8c4ec81178b1b2db8eebde05d6192b --- /dev/null +++ b/nakhyeonn/llama-2-ko-qlora-prompt/result_2023-10-23 21:41:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.0136216961191733, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668525 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3826926906990639, + "acc_stderr": 0.004850508945116094, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + 
"acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852732, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852732 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896628, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 
0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022593, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022593 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961438, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961438 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733095, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37061566370146265, + "mc2_stderr": 0.014735163251703702 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nakhyeonn/llama-2-ko-qlora-prompt", + "model_sha": "3c10df72b42af16132ec1528e2892ef74b65ae4b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nakhyeonn/llama-2-ko-qlora-prompt_1024/result_2023-10-23 21:46:13.json b/nakhyeonn/llama-2-ko-qlora-prompt_1024/result_2023-10-23 21:46:13.json new file mode 100644 index 0000000000000000000000000000000000000000..271962a0c4598362ad1916df6ba2bc941998b5d6 --- /dev/null +++ b/nakhyeonn/llama-2-ko-qlora-prompt_1024/result_2023-10-23 21:46:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.0136216961191733, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668525 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3826926906990639, + "acc_stderr": 0.004850508945116094, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + 
"acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852732, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852732 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896628, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + 
}, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022593, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022593 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961438, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961438 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733095, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37061566370146265, + "mc2_stderr": 0.014735163251703702 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nakhyeonn/llama-2-ko-qlora-prompt_1024", + "model_sha": "1d8e0cc8d22540be3c50816571d0ef34a98aecd3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nakhyeonn/llama-2-ko-qlora-prompt_1024_new/result_2023-10-25 10:59:57.json b/nakhyeonn/llama-2-ko-qlora-prompt_1024_new/result_2023-10-25 10:59:57.json new file mode 100644 index 0000000000000000000000000000000000000000..822d079a63b65f65c8b9dfeeb3c2258f40e50e05 --- /dev/null +++ 
b/nakhyeonn/llama-2-ko-qlora-prompt_1024_new/result_2023-10-25 10:59:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.0136216961191733, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668525 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3826926906990639, + "acc_stderr": 0.004850508945116094, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852732, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852732 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896628, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + 
"acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022593, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022593 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961438, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961438 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733095, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 
0.011590375554733095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37061566370146265, + "mc2_stderr": 0.014735163251703702 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": 
"nakhyeonn/llama-2-ko-qlora-prompt_1024_new", + "model_sha": "4738337870d8e87a2f9a8aac64fcc6935d24afdc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nakhyeonn/llama-2-ko-qlora-prompt_1024_new_2/result_2023-10-28 12:36:11.json b/nakhyeonn/llama-2-ko-qlora-prompt_1024_new_2/result_2023-10-28 12:36:11.json new file mode 100644 index 0000000000000000000000000000000000000000..3f384076ad074aa0f4a583f79a28e4d8b25e4276 --- /dev/null +++ b/nakhyeonn/llama-2-ko-qlora-prompt_1024_new_2/result_2023-10-28 12:36:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.0136216961191733, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668525 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3826926906990639, + "acc_stderr": 0.004850508945116094, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852732, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852732 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, 
+ "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896628, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + 
"acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022593, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022593 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961438, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961438 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 
0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733095, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37060775290008985, + "mc2_stderr": 0.014735131838171926 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nakhyeonn/llama-2-ko-qlora-prompt_1024_new_2", + "model_sha": "d9c7865e0ec6916275f5760289e9671df6aca2b5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nayohan/ko-ref-llama2-7b-Inst/result_2023-10-28 05:54:53.json b/nayohan/ko-ref-llama2-7b-Inst/result_2023-10-28 05:54:53.json new file mode 100644 index 0000000000000000000000000000000000000000..d681cee3ec94097bf2618710dd685a44f8d53455 --- /dev/null +++ b/nayohan/ko-ref-llama2-7b-Inst/result_2023-10-28 05:54:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.38310580204778155, + "acc_norm_stderr": 0.014206472661672874 + }, + "harness|ko_hellaswag|10": { + "acc": 0.373132842063334, + "acc_stderr": 0.004826485582191013, + "acc_norm": 0.48287193786098387, + "acc_norm_stderr": 0.004986852842576722 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.036155076303109344, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.036155076303109344 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.30268199233716475, + "acc_stderr": 0.016428781581749367, + "acc_norm": 0.30268199233716475, + "acc_norm_stderr": 0.016428781581749367 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.02616058445014049, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.02616058445014049 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.03318833286217283, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.03318833286217283 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.040393149787245605, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.040393149787245605 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 
0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.031353050095330855, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.031353050095330855 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.02851025151234193, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.02851025151234193 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.022139081103971524, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.022139081103971524 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293752, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.024892469172462833, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.024892469172462833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.026480357179895705, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.026480357179895705 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.044612721759105085, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.044612721759105085 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895992, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.02646611753895992 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360385, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360385 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.30845771144278605, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.30845771144278605, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.02218203720294837, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.02218203720294837 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 
0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577622, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.02555765398186805, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.02555765398186805 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909902, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909902 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25137614678899084, + "acc_stderr": 0.018599206360287415, + "acc_norm": 0.25137614678899084, + "acc_norm_stderr": 0.018599206360287415 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.024739981355113596, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.024739981355113596 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.034260594244031654, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.034260594244031654 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.017883188134667192, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.017883188134667192 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578729, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578729 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103982, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103982 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.02456220431414232, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.02456220431414232 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.030116426296540582, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.030116426296540582 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25684485006518903, + "acc_stderr": 0.011158455853098858, + "acc_norm": 0.25684485006518903, + "acc_norm_stderr": 0.011158455853098858 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.029102254389674082, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.029102254389674082 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871082, + "mc2": 0.3939753520317938, + "mc2_stderr": 0.01467030453530785 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3754427390791027, + "acc_stderr": 0.016648411589511095, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.017189767032130824 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nayohan/ko-ref-llama2-7b-Inst", + "model_sha": "1be158f488fbac5269d11273e9660aaed798540f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nayohan/llama-2-ko-7b-Inst/result_2023-10-25 05:26:11.json b/nayohan/llama-2-ko-7b-Inst/result_2023-10-25 05:26:11.json new file mode 100644 index 0000000000000000000000000000000000000000..9c5c7fd94bbc8a8c3fbfa364e038e7e14c8b82b0 --- /dev/null +++ b/nayohan/llama-2-ko-7b-Inst/result_2023-10-25 05:26:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32337883959044367, + "acc_stderr": 0.013669421630012122, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759091 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38548097988448515, + "acc_stderr": 0.004857140410776749, + "acc_norm": 0.4992033459470225, + "acc_norm_stderr": 0.004989775077835649 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260595, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260595 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.388250319284802, + "acc_stderr": 0.017427673295544333, + "acc_norm": 0.388250319284802, + "acc_norm_stderr": 0.017427673295544333 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.029644006577009618, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.029644006577009618 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36977491961414793, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.36977491961414793, + "acc_norm_stderr": 0.027417996705630998 + 
}, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277726, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277726 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.021278393863586282, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.021278393863586282 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.031089826002937523, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.031089826002937523 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042767, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042767 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.39316239316239315, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.39316239316239315, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432118, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432118 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302505, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302505 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.02564410863926763, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.02564410863926763 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3283582089552239, + "acc_stderr": 0.03320685889744324, + "acc_norm": 0.3283582089552239, + "acc_norm_stderr": 0.03320685889744324 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106133, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106133 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2861271676300578, + "acc_stderr": 0.02433214677913413, + "acc_norm": 0.2861271676300578, + "acc_norm_stderr": 0.02433214677913413 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924055, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924055 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.025702640260603746, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.025702640260603746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565317, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565317 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29908256880733947, + "acc_stderr": 0.019630417285415175, + "acc_norm": 0.29908256880733947, + "acc_norm_stderr": 0.019630417285415175 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.026493033225145898, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.026493033225145898 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.018521756215423024, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.018521756215423024 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460997, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460997 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.3392857142857143, + "acc_stderr": 0.044939490686135404, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.044939490686135404 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791033, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791033 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653696, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653696 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3014705882352941, + "acc_stderr": 0.027875982114273168, + "acc_norm": 0.3014705882352941, + "acc_norm_stderr": 0.027875982114273168 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.02783302387139968, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.02783302387139968 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2757496740547588, + "acc_stderr": 0.011413813609160989, + "acc_norm": 0.2757496740547588, + "acc_norm_stderr": 0.011413813609160989 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22276621787025705, + "mc1_stderr": 0.01456650696139675, + "mc2": 0.36506276866988424, + "mc2_stderr": 0.014809047702061968 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30342384887839435, + "acc_stderr": 0.015806072717909573, + "acc_norm": 0.38488783943329397, + "acc_norm_stderr": 0.01672857970149866 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nayohan/llama-2-ko-7b-Inst", + "model_sha": "6d4b2a4bc363d79aa03edc287f8921dc1056262f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nayohan/polyglot-ko-1.3b-Inst/result_2023-10-21 12:29:48.json b/nayohan/polyglot-ko-1.3b-Inst/result_2023-10-21 12:29:48.json new file mode 100644 index 0000000000000000000000000000000000000000..99074466660798836e6a25ca770d6dfc6d85afd9 --- /dev/null +++ b/nayohan/polyglot-ko-1.3b-Inst/result_2023-10-21 12:29:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2354948805460751, + "acc_stderr": 0.012399451855004746, + "acc_norm": 0.28924914675767915, + "acc_norm_stderr": 0.013250012579393443 + }, + "harness|ko_hellaswag|10": { + "acc": 0.335291774546903, + "acc_stderr": 0.004711275408138412, + "acc_norm": 0.4166500697072296, + "acc_norm_stderr": 0.004919962822208309 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.03158149539338731, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.03158149539338731 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26181353767560667, + "acc_stderr": 0.015720838678445266, + "acc_norm": 0.26181353767560667, + "acc_norm_stderr": 0.015720838678445266 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19574468085106383, + 
"acc_stderr": 0.025937853139977155, + "acc_norm": 0.19574468085106383, + "acc_norm_stderr": 0.025937853139977155 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18674698795180722, + "acc_stderr": 0.030338749144500594, + "acc_norm": 0.18674698795180722, + "acc_norm_stderr": 0.030338749144500594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.24663677130044842, + "acc_stderr": 0.028930413120910877, + "acc_norm": 0.24663677130044842, + "acc_norm_stderr": 0.028930413120910877 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.19083969465648856, + "acc_stderr": 0.03446513350752599, + "acc_norm": 0.19083969465648856, + "acc_norm_stderr": 0.03446513350752599 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124498, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124498 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.031041941304059288, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.031041941304059288 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.02424378399406217, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 0.02424378399406217 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368466, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368466 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132977, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132977 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553873, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553873 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 
0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 0.029475250236017193, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.029475250236017193 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.0355068398916558, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.0355068398916558 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22832369942196531, + "acc_stderr": 0.022598703804321624, + "acc_norm": 0.22832369942196531, + "acc_norm_stderr": 0.022598703804321624 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.023788583551658526, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.023788583551658526 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.018272575810231857, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.018272575810231857 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.02417084087934101, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.02417084087934101 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591205, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591205 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03583496176361062, + "acc_norm": 0.2631578947368421, + 
"acc_norm_stderr": 0.03583496176361062 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21405228758169934, + "acc_stderr": 0.01659342966232903, + "acc_norm": 0.21405228758169934, + "acc_norm_stderr": 0.01659342966232903 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503803, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503803 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.02747974455080852, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.02747974455080852 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.22685788787483702, + "acc_stderr": 0.010696348133569929, + "acc_norm": 0.22685788787483702, + "acc_norm_stderr": 0.010696348133569929 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923393, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871082, + "mc2": 0.4038819958960065, + "mc2_stderr": 0.014994809766039018 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.34238488783943327, + "acc_stderr": 0.016313907844146373, + "acc_norm": 0.4309327036599764, + "acc_norm_stderr": 0.017025558196043136 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nayohan/polyglot-ko-1.3b-Inst", + "model_sha": "00eff028320c1d4483e112b7706119c4b7972948", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nayohan/polyglot-ko-12.8b-Inst/result_2023-11-08 17:26:10.json b/nayohan/polyglot-ko-12.8b-Inst/result_2023-11-08 17:26:10.json new file mode 100644 index 0000000000000000000000000000000000000000..c172791b80932ed7ace2c1aa0d25f6dd7a5ef31a --- /dev/null +++ b/nayohan/polyglot-ko-12.8b-Inst/result_2023-11-08 17:26:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537365, + "acc_norm": 0.3412969283276451, + "acc_norm_stderr": 0.013855831287497724 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38787094204341765, + "acc_stderr": 0.004862690594815711, + "acc_norm": 0.5082652857996415, + "acc_norm_stderr": 0.0049890996115368146 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1553398058252427, + "acc_stderr": 0.03586594738573974, + "acc_norm": 0.1553398058252427, + "acc_norm_stderr": 0.03586594738573974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.2681992337164751, + "acc_stderr": 0.015842430835269428, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.015842430835269428 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.02675439134803978, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.02675439134803978 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233136, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233136 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.02575586592263294, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.02575586592263294 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2242152466367713, + "acc_stderr": 0.027991534258519524, + "acc_norm": 0.2242152466367713, + "acc_norm_stderr": 0.027991534258519524 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300992, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300992 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993178, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993178 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.022139081103971545, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.022139081103971545 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.17733990147783252, + "acc_stderr": 0.026874337276808342, + "acc_norm": 0.17733990147783252, + "acc_norm_stderr": 0.026874337276808342 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22903225806451613, + "acc_stderr": 0.023904914311782655, + "acc_norm": 0.22903225806451613, + "acc_norm_stderr": 0.023904914311782655 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2863247863247863, + "acc_stderr": 0.02961432369045665, + 
"acc_norm": 0.2863247863247863, + "acc_norm_stderr": 0.02961432369045665 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708097, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708097 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782855, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782855 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 0.02947525023601718, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.02947525023601718 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.022019080012217893, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.022019080012217893 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2398843930635838, + "acc_stderr": 0.022989592543123567, + "acc_norm": 0.2398843930635838, + "acc_norm_stderr": 0.022989592543123567 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886338, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886338 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.02869787397186068, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.02869787397186068 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.042663394431593935, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.042663394431593935 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.018368176306598618, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.018368176306598618 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.026415601914388995, + "acc_norm": 
0.30718954248366015, + "acc_norm_stderr": 0.026415601914388995 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.033911609343436025, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.033911609343436025 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.02484792135806396, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.02484792135806396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321616, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321616 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174917, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174917 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.02747974455080852, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.02747974455080852 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2627118644067797, + "acc_stderr": 0.01124054551499567, + "acc_norm": 0.2627118644067797, + "acc_norm_stderr": 0.01124054551499567 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.03198001660115071, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.03198001660115071 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.03158415324047709, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.03158415324047709 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041869, + "mc2": 0.4089067508722681, + "mc2_stderr": 0.014954626572503958 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3187721369539551, + "acc_stderr": 0.01602142705530958, + "acc_norm": 0.42621015348288077, + "acc_norm_stderr": 0.01700212260948926 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nayohan/polyglot-ko-12.8b-Inst", + "model_sha": "bf3d8224ee9b52ea97f48f624c446f8c5b8e7e39", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nayohan/polyglot-ko-5.8b-Inst-All/result_2023-10-25 17:49:08.json b/nayohan/polyglot-ko-5.8b-Inst-All/result_2023-10-25 17:49:08.json new file mode 100644 index 0000000000000000000000000000000000000000..ad0373b599c4eea8e85b6e0b4d886f108e57fa60 --- /dev/null +++ b/nayohan/polyglot-ko-5.8b-Inst-All/result_2023-10-25 17:49:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2773037542662116, + "acc_stderr": 0.013082095839059374, + "acc_norm": 0.3191126279863481, + "acc_norm_stderr": 0.0136216961191733 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3692491535550687, + 
"acc_stderr": 0.004816152074023089, + "acc_norm": 0.47321250746863175, + "acc_norm_stderr": 0.00498261523305711 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23391812865497075, + "acc_stderr": 0.03246721765117827, + "acc_norm": 0.23391812865497075, + "acc_norm_stderr": 0.03246721765117827 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.015745497169049057, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.015745497169049057 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03820169914517905, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03820169914517905 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.03036358219723816, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.03036358219723816 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.036293353299478595, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.036293353299478595 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.025218040373410616, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.025218040373410616 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22137404580152673, + "acc_stderr": 0.0364129708131373, + "acc_norm": 0.22137404580152673, + "acc_norm_stderr": 0.0364129708131373 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.21379310344827587, + "acc_stderr": 0.034165204477475494, + "acc_norm": 0.21379310344827587, + "acc_norm_stderr": 0.034165204477475494 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062947, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062947 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3487179487179487, + "acc_stderr": 0.02416278028401772, + "acc_norm": 0.3487179487179487, + "acc_norm_stderr": 0.02416278028401772 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + 
"acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3225806451612903, + "acc_stderr": 0.026593084516572284, + "acc_norm": 0.3225806451612903, + "acc_norm_stderr": 0.026593084516572284 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708087, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708087 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.03496101481191181, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.03496101481191181 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2328042328042328, + "acc_stderr": 0.02176596167215453, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.02176596167215453 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21098265895953758, + "acc_stderr": 0.021966309947043117, + "acc_norm": 0.21098265895953758, + "acc_norm_stderr": 0.021966309947043117 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615769, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615769 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.024477222856135107, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.024477222856135107 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089116, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089116 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + 
"acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3137614678899083, + "acc_stderr": 0.019894723341469127, + "acc_norm": 0.3137614678899083, + "acc_norm_stderr": 0.019894723341469127 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.025553169991826514, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.025553169991826514 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.016819028375736386, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736386 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.23628691983122363, + "acc_stderr": 0.02765215314415926, + "acc_norm": 0.23628691983122363, + "acc_norm_stderr": 0.02765215314415926 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2607561929595828, + "acc_stderr": 0.011213471559602336, + "acc_norm": 0.2607561929595828, + "acc_norm_stderr": 0.011213471559602336 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.0331750593000918, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 
0.0331750593000918 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.014948812679062137, + "mc2": 0.40001430050776826, + "mc2_stderr": 0.014747441557861264 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3447461629279811, + "acc_stderr": 0.016340649905418683, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.0171191722080615 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nayohan/polyglot-ko-5.8b-Inst-All", + "model_sha": "08a90add0a4c6508a97387d75823333449728533", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nayohan/polyglot-ko-5.8b-Inst/result_2023-10-12 03:26:44.json b/nayohan/polyglot-ko-5.8b-Inst/result_2023-10-12 03:26:44.json new file 
mode 100644 index 0000000000000000000000000000000000000000..40fed0273c4b499aff2493a1a4598ae04415a91b --- /dev/null +++ b/nayohan/polyglot-ko-5.8b-Inst/result_2023-10-12 03:26:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27474402730375425, + "acc_stderr": 0.013044617212771227, + "acc_norm": 0.3191126279863481, + "acc_norm_stderr": 0.013621696119173307 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37950607448715395, + "acc_stderr": 0.004842723234022034, + "acc_norm": 0.4827723561043617, + "acc_norm_stderr": 0.004986818680313436 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.15789473684210525, + "acc_stderr": 0.027966785859160893, + "acc_norm": 0.15789473684210525, + "acc_norm_stderr": 0.027966785859160893 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.04541609446503948, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.04541609446503948 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21966794380587484, + "acc_stderr": 0.014805384478371162, + "acc_norm": 0.21966794380587484, + "acc_norm_stderr": 0.014805384478371162 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.14042553191489363, + "acc_stderr": 0.022712077616627864, + "acc_norm": 0.14042553191489363, + "acc_norm_stderr": 0.022712077616627864 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.03329394119073528, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.03329394119073528 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26688102893890675, + "acc_stderr": 0.025122637608816646, + "acc_norm": 0.26688102893890675, + "acc_norm_stderr": 0.025122637608816646 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.14349775784753363, + "acc_stderr": 0.02352937126961819, + "acc_norm": 0.14349775784753363, + "acc_norm_stderr": 0.02352937126961819 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.03383201223244442, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.03383201223244442 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35128205128205126, + "acc_stderr": 0.024203665177902796, + "acc_norm": 0.35128205128205126, + "acc_norm_stderr": 
0.024203665177902796 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.0259885007924119, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.0259885007924119 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724067, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724067 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721377, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721377 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.030769444967296018, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.030769444967296018 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.035839017547364134, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.035839017547364134 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.02185150982203172, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.02185150982203172 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566016, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22254335260115607, + "acc_stderr": 0.02239421566194282, + "acc_norm": 0.22254335260115607, + "acc_norm_stderr": 0.02239421566194282 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.0335195387952127, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.0335195387952127 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.023576881744005723, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.023576881744005723 + }, + "harness|ko_mmlu_college_mathematics|5": { + 
"acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3229357798165138, + "acc_stderr": 0.020048115923415318, + "acc_norm": 0.3229357798165138, + "acc_norm_stderr": 0.020048115923415318 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818733, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818733 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.03896878985070417, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.03896878985070417 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.016819028375736386, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736386 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.02484792135806396, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.02484792135806396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20675105485232068, + "acc_stderr": 0.026361651668389094, + "acc_norm": 0.20675105485232068, + "acc_norm_stderr": 0.026361651668389094 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23663624511082137, + 
"acc_stderr": 0.010855137351572746, + "acc_norm": 0.23663624511082137, + "acc_norm_stderr": 0.010855137351572746 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752329, + "mc2": 0.40162480294038216, + "mc2_stderr": 0.015030387645461886 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33412042502951594, + "acc_stderr": 0.016216763304239688, + "acc_norm": 0.4179456906729634, + "acc_norm_stderr": 0.016957292005279713 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nayohan/polyglot-ko-5.8b-Inst", + "model_sha": "f2d30b16043455a6303d11f28cfd012c46edc4cf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nlpai-lab/kullm-polyglot-12.8b-v2/result_2023-09-27 05:32:23.json b/nlpai-lab/kullm-polyglot-12.8b-v2/result_2023-09-27 05:32:23.json new file mode 100644 index 0000000000000000000000000000000000000000..43d51b1b80daf8ec56d92a4e477399bcd4809735 --- /dev/null +++ b/nlpai-lab/kullm-polyglot-12.8b-v2/result_2023-09-27 05:32:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2790102389078498, + "acc_stderr": 0.013106784883601346, + "acc_norm": 0.32764505119453924, + "acc_norm_stderr": 0.013715847940719344 + }, + "harness|ko_hellaswag|10": { + "acc": 0.386476797450707, + "acc_stderr": 0.004859467984155259, + "acc_norm": 0.4987054371639116, + "acc_norm_stderr": 0.00498976468673883 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24776500638569604, + "acc_stderr": 0.015438083080568961, + "acc_norm": 0.24776500638569604, + "acc_norm_stderr": 0.015438083080568961 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313141, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313141 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.02964400657700962, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.02964400657700962 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233136, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233136 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818784, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818784 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21076233183856502, + "acc_stderr": 0.027373095500540193, + "acc_norm": 0.21076233183856502, + "acc_norm_stderr": 0.027373095500540193 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277726, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277726 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02242127361292371, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02242127361292371 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946336, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946336 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144444, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144444 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1967741935483871, + "acc_stderr": 0.02261640942074203, + "acc_norm": 0.1967741935483871, + "acc_norm_stderr": 0.02261640942074203 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.02948036054954119, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.02948036054954119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.026880647889051982, + "acc_norm": 0.25660377358490566, + "acc_norm_stderr": 0.026880647889051982 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766104, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766104 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21890547263681592, + "acc_stderr": 0.029239174636647, + "acc_norm": 0.21890547263681592, + "acc_norm_stderr": 0.029239174636647 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483098, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483098 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.02264421261552521, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.02264421261552521 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 
0.023176298203992005, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.023176298203992005 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.036803503712864616, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.036803503712864616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.024288533637726095, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.024288533637726095 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.03292296639155139, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.03292296639155139 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25137614678899084, + "acc_stderr": 0.018599206360287415, + "acc_norm": 0.25137614678899084, + "acc_norm_stderr": 0.018599206360287415 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242515, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242515 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.024288619466046102, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.024288619466046102 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.035208939510976534, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.035208939510976534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.017401816711427657, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.017401816711427657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872405, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103987, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103987 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1875, + "acc_stderr": 
0.023709788253811766, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.023709788253811766 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3142857142857143, + "acc_stderr": 0.02971932942241748, + "acc_norm": 0.3142857142857143, + "acc_norm_stderr": 0.02971932942241748 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25316455696202533, + "acc_stderr": 0.028304657943035293, + "acc_norm": 0.25316455696202533, + "acc_norm_stderr": 0.028304657943035293 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2522816166883963, + "acc_stderr": 0.011092789056875232, + "acc_norm": 0.2522816166883963, + "acc_norm_stderr": 0.011092789056875232 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869326 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511783, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511783 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.01497482727975233, + "mc2": 0.39040412705496613, + "mc2_stderr": 0.01471780652709213 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346453, + "acc_norm": 0.3955135773317591, + "acc_norm_stderr": 0.016810815902206035 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, 
+ "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nlpai-lab/kullm-polyglot-12.8b-v2", + "model_sha": "9e0c9be881f663ca088b10faad15b54ea3ba779c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nlpai-lab/kullm-polyglot-5.8b-v2/result_2023-10-10 08:19:05.json b/nlpai-lab/kullm-polyglot-5.8b-v2/result_2023-10-10 08:19:05.json new file mode 100644 index 0000000000000000000000000000000000000000..6976ee510001edac5afc6f7d609dc81edc2cf61c --- /dev/null +++ b/nlpai-lab/kullm-polyglot-5.8b-v2/result_2023-10-10 08:19:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2841296928327645, + "acc_stderr": 0.013179442447653887, + "acc_norm": 0.3293515358361775, + "acc_norm_stderr": 0.013734057652635474 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3694483170683131, + "acc_stderr": 0.004816690123209753, + "acc_norm": 0.47301334395538736, + "acc_norm_stderr": 0.004982508198584259 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727654, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727654 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.36893203883495146, + "acc_stderr": 0.04777615181156739, + "acc_norm": 0.36893203883495146, + "acc_norm_stderr": 0.04777615181156739 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24648786717752236, + "acc_stderr": 0.015411308769686936, + "acc_norm": 0.24648786717752236, + "acc_norm_stderr": 0.015411308769686936 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.028020226271200214, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.028020226271200214 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.16265060240963855, + "acc_stderr": 0.02873023789261379, + "acc_norm": 0.16265060240963855, + "acc_norm_stderr": 0.02873023789261379 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21524663677130046, + "acc_stderr": 0.027584066602208256, + "acc_norm": 0.21524663677130046, + "acc_norm_stderr": 0.027584066602208256 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 
0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3383838383838384, + "acc_stderr": 0.033711241426263014, + "acc_norm": 0.3383838383838384, + "acc_norm_stderr": 0.033711241426263014 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.023661296393964273, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.023661296393964273 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536975, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.14, + "acc_stderr": 0.03487350880197773, + "acc_norm": 0.14, + "acc_norm_stderr": 0.03487350880197773 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24516129032258063, + "acc_stderr": 0.02447224384089553, + "acc_norm": 0.24516129032258063, + "acc_norm_stderr": 0.02447224384089553 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.027778835904935434, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.027778835904935434 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.29056603773584905, + "acc_stderr": 0.02794321998933715, + "acc_norm": 0.29056603773584905, + "acc_norm_stderr": 0.02794321998933715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.0449429086625209, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.0449429086625209 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21957671957671956, + "acc_stderr": 0.021320018599770355, + "acc_norm": 0.21957671957671956, + "acc_norm_stderr": 0.021320018599770355 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 
0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071145, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071145 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02378858355165852, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02378858355165852 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104284, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104284 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28807339449541286, + "acc_stderr": 0.019416445892636018, + "acc_norm": 0.28807339449541286, + "acc_norm_stderr": 0.019416445892636018 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.025553169991826528, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.025553169991826528 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.016774672365468514, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.016774672365468514 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.025892151156709405, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.025892151156709405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602158, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602158 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 
0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3551020408163265, + "acc_stderr": 0.03063565515038764, + "acc_norm": 0.3551020408163265, + "acc_norm_stderr": 0.03063565515038764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.23628691983122363, + "acc_stderr": 0.027652153144159256, + "acc_norm": 0.23628691983122363, + "acc_norm_stderr": 0.027652153144159256 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.010865436690780267, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.010865436690780267 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923403, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.01539211880501501, + "mc2": 0.42389862375590953, + "mc2_stderr": 0.015026306992823544 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3152302243211334, + "acc_stderr": 0.01597353492379446, + "acc_norm": 0.3695395513577332, + "acc_norm_stderr": 0.016594883405685424 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nlpai-lab/kullm-polyglot-5.8b-v2", + "model_sha": "5981236c4fd4e624eca2326312d40419e6441256", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/norispace/marcoroni-kopenorcav3/result_2023-12-26 01:51:58.json b/norispace/marcoroni-kopenorcav3/result_2023-12-26 01:51:58.json new file mode 100644 index 0000000000000000000000000000000000000000..0c21656a86d0cede100419e1cf901440d8fd3c44 --- /dev/null +++ b/norispace/marcoroni-kopenorcav3/result_2023-12-26 01:51:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470137, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3906592312288389, + "acc_stderr": 0.004869010152280753, + "acc_norm": 0.49970125473013344, + "acc_norm_stderr": 0.004989780520782244 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.017852981266633955, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.017852981266633955 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 
0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.033141902221106564, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.033141902221106564 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237653, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954953, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954953 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467506, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467506 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131133, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131133 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 
0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.03590910952235525, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235525 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881688, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881688 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296559, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296559 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924806, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2122905027932961, + "acc_stderr": 0.013676644685831726, + "acc_norm": 0.2122905027932961, + "acc_norm_stderr": 0.013676644685831726 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.03191282052669278, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.03191282052669278 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.011952840809646568, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.011952840809646568 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386844, + "mc2": 0.47655278688381186, + "mc2_stderr": 0.01571570439093294 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4025974025974026, + "acc_stderr": 0.01686102048640778, + "acc_norm": 0.45808736717827625, + "acc_norm_stderr": 0.017129852117911144 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "norispace/marcoroni-kopenorcav3", + "model_sha": "9beb5bf9e2cdc666413d90c7886c1eda1ab740dd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/norispace/marcoroni-openorca/result_2023-12-17 23:22:43.json b/norispace/marcoroni-openorca/result_2023-12-17 23:22:43.json new file mode 100644 index 0000000000000000000000000000000000000000..dc4b2151e5d2d7b6c6a507ef73ae88e6b562737a --- /dev/null +++ b/norispace/marcoroni-openorca/result_2023-12-17 23:22:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20477815699658702, + "acc_stderr": 0.011792544338513407, + "acc_norm": 0.24744027303754265, + "acc_norm_stderr": 0.012610352663292673 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2599083847839076, + "acc_stderr": 0.004376877619234108, + "acc_norm": 0.2613025293766182, + "acc_norm_stderr": 0.004384465219070753 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, 
+ "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + 
"acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 
0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299956, + "mc2": 0.5171681414876445, + "mc2_stderr": 0.016488373677157792 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.13105076741440377, + "acc_stderr": 0.011601971778212315, + "acc_norm": 0.40731995277449823, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "norispace/marcoroni-openorca", + "model_sha": "dae17311f94b03522dc99a87484652d0b919350b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oh-yeontaek/llama-2-13B-LoRA-assemble/result_2023-09-28 00:33:01.json b/oh-yeontaek/llama-2-13B-LoRA-assemble/result_2023-09-28 00:33:01.json new file mode 100644 index 0000000000000000000000000000000000000000..da3597044366fe27adba376c30139f45e22a767c --- /dev/null +++ b/oh-yeontaek/llama-2-13B-LoRA-assemble/result_2023-09-28 00:33:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35921501706484643, + "acc_stderr": 0.014020224155839162, + "acc_norm": 0.4052901023890785, + "acc_norm_stderr": 0.014346869060229325 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36496713802031466, + "acc_stderr": 0.004804370563856225, + "acc_norm": 0.4689304919338777, + "acc_norm_stderr": 0.004980138679161039 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.01787994891443166, + "acc_norm": 0.49936143039591313, + 
"acc_norm_stderr": 0.01787994891443166 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.028099240775809563, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.028099240775809563 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33004926108374383, + "acc_stderr": 0.03308530426228258, + "acc_norm": 0.33004926108374383, + "acc_norm_stderr": 0.03308530426228258 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425082, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425082 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112723, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112723 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.037752050135836386, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.037752050135836386 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 
0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223977, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.02847350127296376, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.02847350127296376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.03038805130167812, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.03038805130167812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3229050279329609, + "acc_stderr": 0.01563844038024149, + "acc_norm": 0.3229050279329609, + "acc_norm_stderr": 0.01563844038024149 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682486, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.011965311536571531, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.011965311536571531 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35128518971848227, + "mc1_stderr": 0.0167113581635444, + "mc2": 0.5184394133098864, + "mc2_stderr": 0.01600771387375644 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40613931523022434, + "acc_stderr": 0.016884749503191385, + "acc_norm": 0.41440377804014167, + "acc_norm_stderr": 0.01693658338394363 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 
1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oh-yeontaek/llama-2-13B-LoRA-assemble", + "model_sha": "85bb49d333dba4a08b051418663d16853ce30cee", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-ENin-test-v1/result_2023-12-13 08:24:14.json b/oopsung/Yi-Ko-6B-ENin-test-v1/result_2023-12-13 08:24:14.json new file mode 100644 index 0000000000000000000000000000000000000000..5395b0339de309577b33fb57a690f40a1ec4cd21 --- /dev/null +++ b/oopsung/Yi-Ko-6B-ENin-test-v1/result_2023-12-13 08:24:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3395904436860068, + "acc_stderr": 0.013839039762820166, + "acc_norm": 0.4061433447098976, + "acc_norm_stderr": 0.014351656690097858 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3959370643298148, + "acc_stderr": 0.004880515431323158, + "acc_norm": 0.5326628161720772, + "acc_norm_stderr": 0.004979123236507971 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 
0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5363984674329502, + "acc_stderr": 0.017832524079593258, + "acc_norm": 0.5363984674329502, + "acc_norm_stderr": 0.017832524079593258 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736118, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736118 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653333, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431177, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431177 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911498, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911498 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.037894017602836484, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.037894017602836484 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.024026846392873502, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.024026846392873502 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.02780749004427619, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.02780749004427619 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873632, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873632 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.020728368457638494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 
0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.028541722692618877, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.028541722692618877 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.019933627776857425, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.019933627776857425 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.02872386385328128, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.02872386385328128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976694, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.03186785930004129, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.03186785930004129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087305, + "mc2": 0.425101073539653, + "mc2_stderr": 0.014864041881952731 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5619834710743802, + "acc_stderr": 0.01705775370216029, + 
"acc_norm": 0.6174734356552538, + "acc_norm_stderr": 0.01670916538722882 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-ENin-test-v1", + "model_sha": "fb559edd7e4a2809686425c555a38cda8e61c41e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-Exo-test-v1/result_2023-12-06 23:00:01.json b/oopsung/Yi-Ko-6B-Exo-test-v1/result_2023-12-06 23:00:01.json new file mode 100644 index 0000000000000000000000000000000000000000..d9669e57aa41e03e8ca5c55aab92c7b9a33890d1 --- /dev/null +++ b/oopsung/Yi-Ko-6B-Exo-test-v1/result_2023-12-06 23:00:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 
0.013855831287497723, + "acc_norm": 0.4087030716723549, + "acc_norm_stderr": 0.014365750345427005 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39782911770563634, + "acc_stderr": 0.00488449506945969, + "acc_norm": 0.5329615614419438, + "acc_norm_stderr": 0.004978927164792888 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5338441890166028, + "acc_stderr": 0.017838956009136805, + "acc_norm": 0.5338441890166028, + "acc_norm_stderr": 0.017838956009136805 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758396, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758396 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095495, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 
0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5290322580645161, + "acc_stderr": 0.02839601640276099, + "acc_norm": 0.5290322580645161, + "acc_norm_stderr": 0.02839601640276099 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.03070948699255654, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.03070948699255654 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911498, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911498 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.02413015829976262, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.02413015829976262 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6311926605504588, + "acc_stderr": 0.020686227560729548, + "acc_norm": 0.6311926605504588, + "acc_norm_stderr": 0.020686227560729548 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.019910377463105935, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.019910377463105935 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330361, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330361 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237035, + "mc2": 0.41691402541412415, + "mc2_stderr": 0.014819797591371593 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5678866587957497, + "acc_stderr": 0.017031170198851746, + "acc_norm": 0.615112160566706, + "acc_norm_stderr": 0.016728579701498658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-Exo-test-v1", + "model_sha": "ea7a32987d14dc84615ee31959e4edc36487da7a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No 
newline at end of file diff --git a/oopsung/Yi-Ko-6B-Exogen-test-v1/result_2023-12-12 23:02:24.json b/oopsung/Yi-Ko-6B-Exogen-test-v1/result_2023-12-12 23:02:24.json new file mode 100644 index 0000000000000000000000000000000000000000..54e4206bf9bd9a5db15649d05b2445ee46db09b6 --- /dev/null +++ b/oopsung/Yi-Ko-6B-Exogen-test-v1/result_2023-12-12 23:02:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34215017064846415, + "acc_stderr": 0.01386415215917728, + "acc_norm": 0.4087030716723549, + "acc_norm_stderr": 0.014365750345427005 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39772953594901417, + "acc_stderr": 0.004884287515461508, + "acc_norm": 0.533559051981677, + "acc_norm_stderr": 0.004978529642140935 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.01782913176428719, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.01782913176428719 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653333, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.03070948699255654, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.03070948699255654 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911498, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911498 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.03794012674697031, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.03794012674697031 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425072, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 
0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6311926605504588, + "acc_stderr": 0.020686227560729548, + "acc_norm": 0.6311926605504588, + "acc_norm_stderr": 0.020686227560729548 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.019944914136873583, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.019944914136873583 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0286638201471995, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0286638201471995 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 
0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330364, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330364 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237035, + "mc2": 0.415869370781035, + "mc2_stderr": 0.014811673986495334 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5690672963400236, + "acc_stderr": 0.01702555819604314, + "acc_norm": 0.615112160566706, + "acc_norm_stderr": 0.01672857970149866 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-Exogen-test-v1", + "model_sha": "01f5d976626f1326236a5d2522eb0612c5306289", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-N-test-v1/result_2023-12-06 22:59:48.json b/oopsung/Yi-Ko-6B-N-test-v1/result_2023-12-06 22:59:48.json new file mode 100644 index 0000000000000000000000000000000000000000..9fdad033ca2124ba4ca59cbc3ece724604b5e105 --- /dev/null +++ b/oopsung/Yi-Ko-6B-N-test-v1/result_2023-12-06 22:59:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3387372013651877, + "acc_stderr": 0.01383056892797433, + "acc_norm": 0.4061433447098976, + "acc_norm_stderr": 0.014351656690097862 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39673371838279226, + "acc_stderr": 0.004882200364432364, + "acc_norm": 0.5327623979286995, + "acc_norm_stderr": 0.0049790580784786955 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5338441890166028, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.5338441890166028, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.025317649726448652, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.025317649726448652 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649038, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649038 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.02948036054954119, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.02948036054954119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871916, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.038016851045244604, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.038016851045244604 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.024419234966819064, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.024419234966819064 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + 
"acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6201834862385321, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.6201834862385321, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805434, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4199346405228758, + "acc_stderr": 0.019966811178256483, + "acc_norm": 0.4199346405228758, + "acc_norm_stderr": 0.019966811178256483 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 
0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330366, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330366 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522505, + "mc2": 0.4189628761359413, + "mc2_stderr": 0.014837511319155058 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5596221959858324, + "acc_stderr": 0.01706769977431298, + "acc_norm": 0.6080283353010626, + "acc_norm_stderr": 0.016784332119424077 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-N-test-v1", + "model_sha": "21013e0de8b706a0462a2a0ebc7f7e1f9be4b5ab", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-all-test-v1/result_2023-12-14 08:29:43.json b/oopsung/Yi-Ko-6B-all-test-v1/result_2023-12-14 08:29:43.json new file mode 100644 index 0000000000000000000000000000000000000000..9097df49405a157604eb5d79f24c6e812ea9ada6 --- /dev/null +++ b/oopsung/Yi-Ko-6B-all-test-v1/result_2023-12-14 08:29:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3361774744027304, + "acc_stderr": 0.013804855026205763, + "acc_norm": 0.4035836177474403, + "acc_norm_stderr": 0.014337158914268447 + }, + "harness|ko_hellaswag|10": { + "acc": 0.395538737303326, + "acc_stderr": 0.004879667889198489, + "acc_norm": 0.5326628161720772, + "acc_norm_stderr": 0.004979123236507971 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5389527458492975, + "acc_stderr": 0.017825621793239016, + "acc_norm": 0.5389527458492975, + "acc_norm_stderr": 0.017825621793239016 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + 
"acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.02531063925493391, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.02531063925493391 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162933, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162933 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.535483870967742, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431177, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431177 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114982, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114982 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.037894017602836484, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.037894017602836484 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 
0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.0198984127176359, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.0198984127176359 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3376792698826597, + "acc_stderr": 0.012078563777145548, + "acc_norm": 0.3376792698826597, + "acc_norm_stderr": 0.012078563777145548 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674102, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674102 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.01557284045287583, + "mc2": 0.4224730625453278, + "mc2_stderr": 0.014842293442821076 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5608028335301063, + "acc_stderr": 0.017062775744780705, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.016747577991642785 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-all-test-v1", + "model_sha": "06d9f443c5219ec69bd214d1f7d8c08668104548", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-com-test-v1/result_2023-12-21 05:57:50.json b/oopsung/Yi-Ko-6B-com-test-v1/result_2023-12-21 05:57:50.json new file mode 100644 index 0000000000000000000000000000000000000000..a3ef3b920e7852cca483df096744ef77fc2aaf5a --- /dev/null +++ b/oopsung/Yi-Ko-6B-com-test-v1/result_2023-12-21 05:57:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.01388881628678211, + "acc_norm": 0.4138225255972696, + "acc_norm_stderr": 0.014392730009221007 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3973312089225254, + "acc_stderr": 0.00488345518890897, + "acc_norm": 0.5322644891455885, + "acc_norm_stderr": 0.004979381876712618 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5338441890166028, + "acc_stderr": 0.017838956009136805, + "acc_norm": 0.5338441890166028, + "acc_norm_stderr": 0.017838956009136805 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.028386198084177687, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.028386198084177687 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 
0.49327354260089684, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933903, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933903 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653333, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809447, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809447 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524586, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524586 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 
0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.02422996529842507, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.02422996529842507 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860807, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.020728368457638497, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.020728368457638497 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805434, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, 
+ "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596455, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596455 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897639, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897639 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087305, + "mc2": 0.42063207226082105, + "mc2_stderr": 0.014843379672251859 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5726092089728453, + "acc_stderr": 0.017008129844823156, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.01673813076032175 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-com-test-v1", + "model_sha": "0ca70a1f4310f7f0603e9eaa80f22b68102a5755", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-orcapus-test-v1/result_2023-12-06 22:59:32.json b/oopsung/Yi-Ko-6B-orcapus-test-v1/result_2023-12-06 22:59:32.json new file mode 100644 index 0000000000000000000000000000000000000000..da9231750c4ce9474301b6e44d30c8846724c3e4 --- /dev/null +++ b/oopsung/Yi-Ko-6B-orcapus-test-v1/result_2023-12-06 22:59:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3464163822525597, + "acc_stderr": 0.013905011180063246, + "acc_norm": 0.4180887372013652, + "acc_norm_stderr": 0.01441398839699608 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39494124676359293, + "acc_stderr": 0.004878390226591715, + "acc_norm": 0.5323640709022107, + "acc_norm_stderr": 0.004979317515432522 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 0.01784491809046855, + "acc_norm": 0.5312899106002554, + "acc_norm_stderr": 0.01784491809046855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + 
"acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.03437305501980619, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.03437305501980619 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5451612903225806, + "acc_stderr": 0.028327743091561088, + "acc_norm": 0.5451612903225806, + "acc_norm_stderr": 0.028327743091561088 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431183, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431183 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389177, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389177 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 
0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.02422996529842508, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.02422996529842508 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723368, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723368 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6330275229357798, + "acc_stderr": 0.020664675659520532, + "acc_norm": 0.6330275229357798, + "acc_norm_stderr": 0.020664675659520532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, 
+ "acc_stderr": 0.019955975145835542, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557956, + "mc2": 0.41368059870095875, + "mc2_stderr": 0.014743626921643821 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.564344746162928, + "acc_stderr": 0.017047415229476316, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.01675692157106942 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-orcapus-test-v1", + "model_sha": "aaf03f76d00856f3e4a43a182bf04ea58a914d68", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-tech-test-v1/result_2023-12-06 23:00:10.json b/oopsung/Yi-Ko-6B-tech-test-v1/result_2023-12-06 23:00:10.json new file mode 100644 index 0000000000000000000000000000000000000000..0495055be90a4eeac16b3b17c5b199a748ceb982 --- /dev/null +++ b/oopsung/Yi-Ko-6B-tech-test-v1/result_2023-12-06 23:00:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.01384746051889298, + "acc_norm": 0.4069965870307167, + "acc_norm_stderr": 0.014356399418009126 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3963353913563035, + "acc_stderr": 0.0048813595891489935, + "acc_norm": 0.5315674168492333, + "acc_norm_stderr": 0.004979826829400774 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5338441890166028, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.5338441890166028, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + 
"acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.028386198084177687, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.028386198084177687 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.02530295889085015, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.02530295889085015 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.028434533152681876, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.028434533152681876 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809447, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809447 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389177, + "acc_norm": 
0.539622641509434, + "acc_norm_stderr": 0.030676096599389177 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066468, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066468 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.037894017602836484, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.037894017602836484 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.02422996529842508, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.02422996529842508 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723368, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723368 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.634862385321101, + "acc_stderr": 0.020642801454384, + "acc_norm": 0.634862385321101, + "acc_norm_stderr": 0.020642801454384 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.019910377463105935, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.019910377463105935 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33702737940026073, + "acc_stderr": 0.012072836273691327, + "acc_norm": 0.33702737940026073, + "acc_norm_stderr": 0.012072836273691327 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.4089346755804923, + "mc2_stderr": 0.014760925941294287 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5726092089728453, + "acc_stderr": 0.017008129844823156, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.01673813076032175 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-tech-test-v1", + "model_sha": "f47a57c15cae3832335d3df550ff08f20c717822", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-wiki-test-v1/result_2023-12-07 00:36:08.json b/oopsung/Yi-Ko-6B-wiki-test-v1/result_2023-12-07 00:36:08.json new file mode 100644 index 0000000000000000000000000000000000000000..6acf4adaa0b4e6779d8a6e6b6dcd81ce500c9c72 --- /dev/null +++ b/oopsung/Yi-Ko-6B-wiki-test-v1/result_2023-12-07 00:36:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3438566552901024, + "acc_stderr": 0.013880644570156211, + "acc_norm": 0.4121160409556314, + "acc_norm_stderr": 0.0143839153022254 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39842660824536946, + "acc_stderr": 0.004885735963346905, + "acc_norm": 0.5349531965743876, + "acc_norm_stderr": 0.004977574188421319 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + 
"acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 0.01784491809046855, + "acc_norm": 0.5312899106002554, + "acc_norm_stderr": 0.01784491809046855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + "acc_stderr": 0.02838474778881334, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.02838474778881334 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.029343114798094462, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.029343114798094462 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5358490566037736, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.5358490566037736, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911498, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911498 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860807, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635896, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635896 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0286638201471995, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0286638201471995 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704716004, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704716004 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522509, + "mc2": 0.41603835953324, + "mc2_stderr": 0.014820168398190375 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5655253837072018, + "acc_stderr": 0.017042098620824935, + 
"acc_norm": 0.615112160566706, + "acc_norm_stderr": 0.016728579701498658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-wiki-test-v1", + "model_sha": "aacbf8f82cb6c76af34851c83f2269d1ee45ee70", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-ko-F-v1/result_2023-12-28 06:29:25.json b/oopsung/Yi-ko-F-v1/result_2023-12-28 06:29:25.json new file mode 100644 index 0000000000000000000000000000000000000000..50342f39b4ee26a67b59c845bb4347bfcad32cc9 --- /dev/null +++ b/oopsung/Yi-ko-F-v1/result_2023-12-28 06:29:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3430034129692833, + "acc_stderr": 0.01387242322371817, + "acc_norm": 
0.4052901023890785, + "acc_norm_stderr": 0.014346869060229325 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3950408285202151, + "acc_stderr": 0.004878603699686037, + "acc_norm": 0.5302728540131448, + "acc_norm_stderr": 0.004980627287147577 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865633, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865633 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736118, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736118 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.03077265364207567, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.03077265364207567 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255168, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255168 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.03468343295111126, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.03468343295111126 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5216049382716049, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.5216049382716049, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.020920058346111062, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.020920058346111062 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.019997973035458333, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.019997973035458333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.030290619180485694, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.030290619180485694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.031137304297185805, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.031137304297185805 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3200782268578879, + "acc_stderr": 0.01191479194763853, + "acc_norm": 0.3200782268578879, + "acc_norm_stderr": 0.01191479194763853 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.038783721137112745, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.038783721137112745 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253595, + "mc2": 0.410763306988178, + "mc2_stderr": 0.014699934243376766 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5218417945690673, + "acc_stderr": 0.01717394447429438, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.01701403811929749 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-ko-F-v1", + "model_sha": "6969a37bf3a4f29a2570a584d5cc3dba7c28fd78", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end 
of file diff --git a/oopsung/llama2-7b-exo-test-v1/result_2023-11-29 04:04:02.json b/oopsung/llama2-7b-exo-test-v1/result_2023-11-29 04:04:02.json new file mode 100644 index 0000000000000000000000000000000000000000..f79b68c79171167bd2b2ef6efcba5e3606735de9 --- /dev/null +++ b/oopsung/llama2-7b-exo-test-v1/result_2023-11-29 04:04:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.01359243151906808, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407165 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38169687313284206, + "acc_stderr": 0.004848099661619696, + "acc_norm": 0.4947221668990241, + "acc_norm_stderr": 0.004989503417767286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3486590038314176, + "acc_stderr": 0.017041243143490953, + "acc_norm": 0.3486590038314176, + "acc_norm_stderr": 0.017041243143490953 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996795, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996795 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.029771642712491223, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.029771642712491223 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893947, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893947 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.043171711948702556, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378949, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378949 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3025210084033613, + "acc_stderr": 0.029837962388291926, + "acc_norm": 0.3025210084033613, + "acc_norm_stderr": 0.029837962388291926 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28717948717948716, + "acc_stderr": 0.022939925418530627, + "acc_norm": 0.28717948717948716, + "acc_norm_stderr": 0.022939925418530627 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335134, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335134 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3547008547008547, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.3547008547008547, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3132075471698113, + "acc_stderr": 0.02854479331905533, + "acc_norm": 0.3132075471698113, + "acc_norm_stderr": 0.02854479331905533 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03333333333333334, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03333333333333334 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845334, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615769, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615769 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 
0.025630824975621348, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.025630824975621348 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3834862385321101, + "acc_stderr": 0.02084715664191598, + "acc_norm": 0.3834862385321101, + "acc_norm_stderr": 0.02084715664191598 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.035670166752768614, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.035670166752768614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.0276841818833029, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.0276841818833029 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.272875816993464, + "acc_stderr": 0.01802047414839358, + "acc_norm": 0.272875816993464, + "acc_norm_stderr": 0.01802047414839358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.02689170942834396, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.02689170942834396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936484, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936484 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280058, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280058 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2897959183673469, + "acc_stderr": 0.02904308868330433, + "acc_norm": 0.2897959183673469, + "acc_norm_stderr": 0.02904308868330433 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.37130801687763715, + "acc_stderr": 
0.0314506860074486, + "acc_norm": 0.37130801687763715, + "acc_norm_stderr": 0.0314506860074486 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271817, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271817 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083292, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083292 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511784, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511784 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731618, + "mc2": 0.369212058529924, + "mc2_stderr": 0.014746397665894159 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2892561983471074, + "acc_stderr": 0.015588800386053555, + "acc_norm": 0.4203069657615112, + "acc_norm_stderr": 0.016970598281177703 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 
1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-7b-exo-test-v1", + "model_sha": "a679a02782388b0ae0949b64c69f5681790aa569", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/llama2-7b-ko-Orcapus-test-v1/result_2023-11-30 13:49:45.json b/oopsung/llama2-7b-ko-Orcapus-test-v1/result_2023-11-30 13:49:45.json new file mode 100644 index 0000000000000000000000000000000000000000..13aa5fac5c8d7e184327ce7c1da6c9a1729bb416 --- /dev/null +++ b/oopsung/llama2-7b-ko-Orcapus-test-v1/result_2023-11-30 13:49:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32337883959044367, + "acc_stderr": 0.013669421630012127, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.014194389086685251 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3820952001593308, + "acc_stderr": 0.004849065962692134, + "acc_norm": 0.4947221668990241, + "acc_norm_stderr": 0.004989503417767286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3397190293742018, + "acc_stderr": 0.016936394114301635, + "acc_norm": 0.3397190293742018, + "acc_norm_stderr": 0.016936394114301635 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.030363582197238167, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.030363582197238167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.0350729543137052, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.0350729543137052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.02944249558585747, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.02944249558585747 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, 
+ "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2815126050420168, + "acc_stderr": 0.029213549414372156, + "acc_norm": 0.2815126050420168, + "acc_norm_stderr": 0.029213549414372156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.02228214120420443, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.02228214120420443 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27741935483870966, + "acc_stderr": 0.025470196835900055, + "acc_norm": 0.27741935483870966, + "acc_norm_stderr": 0.025470196835900055 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.030882736974138653, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32075471698113206, + "acc_stderr": 0.02872750295788027, + "acc_norm": 0.32075471698113206, + "acc_norm_stderr": 0.02872750295788027 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969655, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969655 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.30845771144278605, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.30845771144278605, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.030085743248565666, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.030085743248565666 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + 
"acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.024946792225272314, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.024946792225272314 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625672, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625672 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.03119584087770029, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.03119584087770029 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3211009174311927, + "acc_stderr": 0.020018149772733747, + "acc_norm": 0.3211009174311927, + "acc_norm_stderr": 0.020018149772733747 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.027121956071388863, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.027121956071388863 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28594771241830064, + "acc_stderr": 0.01828048507295468, + "acc_norm": 0.28594771241830064, + "acc_norm_stderr": 0.01828048507295468 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291518, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291518 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329879, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329879 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369922, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369922 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 
0.04020151261036846 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073163, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35443037974683544, + "acc_stderr": 0.031137304297185812, + "acc_norm": 0.35443037974683544, + "acc_norm_stderr": 0.031137304297185812 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2842242503259452, + "acc_stderr": 0.011519880596516076, + "acc_norm": 0.2842242503259452, + "acc_norm_stderr": 0.011519880596516076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752322, + "mc2": 0.3821392578358511, + "mc2_stderr": 0.014847374366938948 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091115, + "acc_norm": 0.4344746162927981, + "acc_norm_stderr": 0.01704209862082494 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-7b-ko-Orcapus-test-v1", + "model_sha": "714ee1162de8359dd817b5c39158055213897f6a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/llama2-7b-ko-wiki-test-v1/result_2023-12-05 23:06:00.json b/oopsung/llama2-7b-ko-wiki-test-v1/result_2023-12-05 23:06:00.json new file mode 100644 index 0000000000000000000000000000000000000000..ed27d181f7008f193894bda342917cd0f28c0293 --- /dev/null +++ b/oopsung/llama2-7b-ko-wiki-test-v1/result_2023-12-05 23:06:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.013621696119173299, + "acc_norm": 0.3771331058020478, + "acc_norm_stderr": 0.014163366896192598 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3828918542123083, + "acc_stderr": 0.004850988215167545, + "acc_norm": 0.49522007568213505, + "acc_norm_stderr": 0.004989553396413108 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3537675606641124, + "acc_stderr": 0.017098184708161903, + "acc_norm": 0.3537675606641124, + "acc_norm_stderr": 0.017098184708161903 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740749, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740749 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 
0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378949, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378949 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2846153846153846, + "acc_stderr": 0.022878322799706263, + "acc_norm": 0.2846153846153846, + "acc_norm_stderr": 0.022878322799706263 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.025822106119415898, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.025822106119415898 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32075471698113206, + "acc_stderr": 0.02872750295788027, + "acc_norm": 0.32075471698113206, + "acc_norm_stderr": 0.02872750295788027 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.34328358208955223, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.34328358208955223, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 
0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.034089978868575295, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.034089978868575295 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.025773111169630453, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.025773111169630453 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.03257714077709662, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.03257714077709662 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3871559633027523, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.3871559633027523, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290303, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.027870745278290303 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.0387813988879761, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.0387813988879761 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663137, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663137 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.02689170942834396, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.02689170942834396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 
0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.02879518557429129, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.02879518557429129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28878748370273793, + "acc_stderr": 0.011574914757219962, + "acc_norm": 0.28878748370273793, + "acc_norm_stderr": 0.011574914757219962 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923403, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624337, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624337 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871101, + "mc2": 0.3714247171675403, + "mc2_stderr": 0.014762111514590639 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091112, + "acc_norm": 0.4167650531286895, + "acc_norm_stderr": 0.01695048914610883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-7b-ko-wiki-test-v1", + "model_sha": "12e202c014b0c901644b34a9d422be93f5a7f959", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/llama2-7b-koNqa-test-v1/result_2023-11-30 07:12:33.json b/oopsung/llama2-7b-koNqa-test-v1/result_2023-11-30 07:12:33.json new file mode 100644 index 0000000000000000000000000000000000000000..60eb1274db06f1ece813ea3907cc261f3c29f97a --- /dev/null +++ b/oopsung/llama2-7b-koNqa-test-v1/result_2023-11-30 07:12:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3148464163822526, + "acc_stderr": 0.013572657703084948, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349815 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3852818163712408, + "acc_stderr": 0.004856672322044455, + "acc_norm": 0.4971121290579566, + "acc_norm_stderr": 0.004989698183207841 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.32950191570881227, + "acc_stderr": 0.016808322261740446, + "acc_norm": 0.32950191570881227, + "acc_norm_stderr": 0.016808322261740446 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288087, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288087 
+ }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.033184773338453315, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.033184773338453315 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277726, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277726 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.021992016662370547, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.021992016662370547 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782426, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885203, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885203 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.030882736974138653, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33584905660377357, + "acc_stderr": 0.029067220146644826, + "acc_norm": 0.33584905660377357, + "acc_norm_stderr": 0.029067220146644826 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.036030385453603826, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.036030385453603826 + 
}, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3034825870646766, + "acc_stderr": 0.03251006816458618, + "acc_norm": 0.3034825870646766, + "acc_norm_stderr": 0.03251006816458618 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.1597222222222222, + "acc_stderr": 0.030635578972093278, + "acc_norm": 0.1597222222222222, + "acc_norm_stderr": 0.030635578972093278 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.024946792225272314, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.024946792225272314 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.034624199316156234, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.034624199316156234 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.026229649178821163, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.026229649178821163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3724770642201835, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.3724770642201835, + "acc_norm_stderr": 0.020728368457638494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110307, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110307 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4132231404958678, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.4132231404958678, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.01818521895431808, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.01818521895431808 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + 
"acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02915752218460559, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02915752218460559 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3014705882352941, + "acc_stderr": 0.027875982114273168, + "acc_norm": 0.3014705882352941, + "acc_norm_stderr": 0.027875982114273168 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142777, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142777 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.34177215189873417, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.34177215189873417, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803546, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803546 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 0.03742597043806586, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.03742597043806586 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.3738242893549986, + "mc2_stderr": 0.014771561798919752 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29161747343565525, + "acc_stderr": 0.015626276690070242, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.01697710193260152 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-7b-koNqa-test-v1", + "model_sha": "6a983a209cefd4285f18b60c380e818544155175", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/llama2-7b-n-ox-test-v1/result_2023-11-29 22:55:16.json b/oopsung/llama2-7b-n-ox-test-v1/result_2023-11-29 22:55:16.json new file mode 100644 index 0000000000000000000000000000000000000000..2f7cb433b406ce2292efd1f565b55168b918244a --- /dev/null +++ b/oopsung/llama2-7b-n-ox-test-v1/result_2023-11-29 22:55:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3122866894197952, + "acc_stderr": 0.013542598541688065, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349814 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38498307110137425, + "acc_stderr": 0.00485596857899873, + "acc_norm": 0.49731129257120094, + "acc_norm_stderr": 0.004989709267191029 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.03301405946987249, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.03301405946987249 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.33205619412515963, + "acc_stderr": 0.01684117465529572, + "acc_norm": 0.33205619412515963, + "acc_norm_stderr": 0.01684117465529572 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 
0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.03036358219723816, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.03036358219723816 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288087, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288087 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3247588424437299, + "acc_stderr": 0.026596782287697046, + "acc_norm": 0.3247588424437299, + "acc_norm_stderr": 0.026596782287697046 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533953, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533953 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.021992016662370547, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.021992016662370547 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782426, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.02556060472102288, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.02556060472102288 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3247863247863248, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.3247863247863248, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33962264150943394, + "acc_stderr": 0.02914690474779834, + "acc_norm": 0.33962264150943394, + "acc_norm_stderr": 0.02914690474779834 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + 
"acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.29850746268656714, + "acc_stderr": 0.032357437893550424, + "acc_norm": 0.29850746268656714, + "acc_norm_stderr": 0.032357437893550424 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594295, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.02210112878741542, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.02210112878741542 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.1736111111111111, + "acc_stderr": 0.031674733837957166, + "acc_norm": 0.1736111111111111, + "acc_norm_stderr": 0.031674733837957166 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153193, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153193 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33641975308641975, + "acc_stderr": 0.02628973494595293, + "acc_norm": 0.33641975308641975, + "acc_norm_stderr": 0.02628973494595293 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.03097543638684543, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.03097543638684543 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3908256880733945, + "acc_stderr": 0.02092005834611106, + "acc_norm": 0.3908256880733945, + "acc_norm_stderr": 0.02092005834611106 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.03129843185743809, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.03129843185743809 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.027363593284684948, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.027363593284684948 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + 
"acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28594771241830064, + "acc_stderr": 0.01828048507295468, + "acc_norm": 0.28594771241830064, + "acc_norm_stderr": 0.01828048507295468 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291518, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291518 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.030388051301678116, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.030388051301678116 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031236, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031236 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.027979823538744543, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.027979823538744543 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.030964810588786713, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.030964810588786713 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2842242503259452, + "acc_stderr": 0.011519880596516076, + "acc_norm": 0.2842242503259452, + "acc_norm_stderr": 0.011519880596516076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.03713158067481912, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.03713158067481912 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.3776665539033974, + "mc2_stderr": 0.01480593605333011 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091115, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.01697710193260152 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-7b-n-ox-test-v1", + "model_sha": "0eee95b8a16fdeec4b800e0d4d4c9fd2f340f3d1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/llama2-7b-n-test-v1/result_2023-11-29 13:28:28.json b/oopsung/llama2-7b-n-test-v1/result_2023-11-29 13:28:28.json new file mode 100644 index 0000000000000000000000000000000000000000..7df01f4b081473fb5a10b3cd8bd8d41b9771017d --- /dev/null +++ b/oopsung/llama2-7b-n-test-v1/result_2023-11-29 13:28:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31569965870307165, + "acc_stderr": 0.013582571095815291, + "acc_norm": 0.378839590443686, + "acc_norm_stderr": 0.014175915490000326 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38488348934475203, + "acc_stderr": 0.004855733568540273, + "acc_norm": 0.4971121290579566, + "acc_norm_stderr": 0.004989698183207841 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + 
"acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3499361430395913, + "acc_stderr": 0.017055679797150426, + "acc_norm": 0.3499361430395913, + "acc_norm_stderr": 0.017055679797150426 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572203, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572203 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341923, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.028510251512341923 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27741935483870966, + "acc_stderr": 0.025470196835900055, + "acc_norm": 0.27741935483870966, + "acc_norm_stderr": 
0.025470196835900055 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3247863247863248, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.3247863247863248, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.02898545565233439, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.02898545565233439 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844075, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3034825870646766, + "acc_stderr": 0.03251006816458618, + "acc_norm": 0.3034825870646766, + "acc_norm_stderr": 0.03251006816458618 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.18055555555555555, + "acc_stderr": 0.032166008088022675, + "acc_norm": 0.18055555555555555, + "acc_norm_stderr": 0.032166008088022675 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153193, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153193 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3395061728395062, + "acc_stderr": 0.026348564412011624, + "acc_norm": 0.3395061728395062, + "acc_norm_stderr": 0.026348564412011624 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.03097543638684543, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.03097543638684543 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022596, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022596 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, 
+ "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.027420477662629242, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.027420477662629242 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053442, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.018217269552053442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291518, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291518 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000535, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000535 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031232, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031232 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.027833023871399673, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.027833023871399673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.030964810588786713, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.030964810588786713 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2842242503259452, + "acc_stderr": 0.011519880596516076, + "acc_norm": 0.2842242503259452, + "acc_norm_stderr": 0.011519880596516076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.036974422050315967, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.036974422050315967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041855, + "mc2": 0.37643541252753543, + "mc2_stderr": 0.014796413880118433 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091112, + "acc_norm": 0.4203069657615112, + "acc_norm_stderr": 0.016970598281177706 + 
} + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-7b-n-test-v1", + "model_sha": "da4b8d73de4f71bd8752f16240315120c8409029", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/llama2-platypus-7b-f/result_2023-11-27 06:40:38.json b/oopsung/llama2-platypus-7b-f/result_2023-11-27 06:40:38.json new file mode 100644 index 0000000000000000000000000000000000000000..b1579c1c79ce570b87531ef4f8a96b5003b76ed2 --- /dev/null +++ b/oopsung/llama2-platypus-7b-f/result_2023-11-27 06:40:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31569965870307165, + "acc_stderr": 0.013582571095815291, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 
0.01415063143511173 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3813981278629755, + "acc_stderr": 0.004847372670134639, + "acc_norm": 0.4918342959569807, + "acc_norm_stderr": 0.004989115942570064 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457923, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457923 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3486590038314176, + "acc_stderr": 0.017041243143490956, + "acc_norm": 0.3486590038314176, + "acc_norm_stderr": 0.017041243143490956 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.02937917046412482, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.02937917046412482 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683229, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683229 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893944, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893944 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229136, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229136 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016339, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.03464881675016339 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3067226890756303, + "acc_stderr": 0.029953823891887048, + "acc_norm": 0.3067226890756303, + "acc_norm_stderr": 0.029953823891887048 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2794871794871795, + "acc_stderr": 0.02275238883977682, + "acc_norm": 0.2794871794871795, + "acc_norm_stderr": 0.02275238883977682 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335134, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335134 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3547008547008547, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.3547008547008547, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.02898545565233439, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.02898545565233439 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844075, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.036848815213890225, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.036848815213890225 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.033917503223216613, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.033917503223216613 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.18055555555555555, + "acc_stderr": 0.032166008088022675, + "acc_norm": 0.18055555555555555, + "acc_norm_stderr": 0.032166008088022675 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.025630824975621348, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.025630824975621348 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.032577140777096614, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.032577140777096614 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 
0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.02082814851702259, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.02082814851702259 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.027420477662629235, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.027420477662629235 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.272875816993464, + "acc_stderr": 0.01802047414839358, + "acc_norm": 0.272875816993464, + "acc_norm_stderr": 0.01802047414839358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936484, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936484 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27346938775510204, + "acc_stderr": 0.02853556033712844, + "acc_norm": 0.27346938775510204, + "acc_norm_stderr": 0.02853556033712844 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3755274261603376, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.3755274261603376, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28878748370273793, + "acc_stderr": 0.011574914757219962, + "acc_norm": 0.28878748370273793, + "acc_norm_stderr": 0.011574914757219962 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + 
"acc_stderr": 0.03567969772268048, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268048 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.01500067437357034, + "mc2": 0.37489222578788345, + "mc2_stderr": 0.014767448504749014 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2975206611570248, + "acc_stderr": 0.01571774220508993, + "acc_norm": 0.42266824085005905, + "acc_norm_stderr": 0.0169835060795776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-platypus-7b-f", + "model_sha": "a42c0fc7a148b9a0b977a79bb9460585b1120350", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/openchat/openchat_3.5/result_2023-11-06 11:21:55.json 
b/openchat/openchat_3.5/result_2023-11-06 11:21:55.json new file mode 100644 index 0000000000000000000000000000000000000000..5d1948b9f03de6aae42b4a4f892b8b0947aa7c80 --- /dev/null +++ b/openchat/openchat_3.5/result_2023-11-06 11:21:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3250853242320819, + "acc_stderr": 0.013688147309729124, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759084 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36885082652858, + "acc_stderr": 0.0048150733340006, + "acc_norm": 0.47161919936267677, + "acc_norm_stderr": 0.0049817366895187455 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107675, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107675 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.017832524079593265, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.017832524079593265 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.028173917761762875, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.028173917761762875 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415845, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415845 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.02528558599001783, + "acc_norm": 0.4641025641025641, + 
"acc_norm_stderr": 0.02528558599001783 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998576, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998576 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502737, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028428, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028428 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412243, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412243 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.0317229500433233, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.0317229500433233 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666533, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.011989936640666533 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431855, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431855 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.47927145420911593, + "mc2_stderr": 0.01578696956369576 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43211334120425027, + "acc_stderr": 0.017031170198851746, + "acc_norm": 0.4510035419126328, + "acc_norm_stderr": 0.01710761885954935 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "openchat/openchat_3.5", + "model_sha": "0be788e53032214fe8c05d34682a2bbab6ba6580", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/quantumaikr/KoreanLM-1.5b/result_2023-10-18 16:24:34.json b/quantumaikr/KoreanLM-1.5b/result_2023-10-18 16:24:34.json new file mode 100644 index 0000000000000000000000000000000000000000..11b8203e5545b3714e6e2186e9e29f0001aabcc6 --- /dev/null +++ b/quantumaikr/KoreanLM-1.5b/result_2023-10-18 16:24:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21245733788395904, + "acc_stderr": 0.011953482906582949, + "acc_norm": 0.2781569965870307, + "acc_norm_stderr": 0.0130944699195388 + }, + "harness|ko_hellaswag|10": { + "acc": 0.26000796654052977, + "acc_stderr": 0.004377421493297836, + "acc_norm": 0.2647878908583947, + "acc_norm_stderr": 0.004403184691341697 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.03401052620104089, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.03401052620104089 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.22988505747126436, + "acc_stderr": 0.015046301846691838, + "acc_norm": 0.22988505747126436, + "acc_norm_stderr": 0.015046301846691838 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501116, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501116 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19574468085106383, + "acc_stderr": 0.025937853139977148, + "acc_norm": 0.19574468085106383, + "acc_norm_stderr": 0.025937853139977148 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944966, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944966 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.32286995515695066, + "acc_stderr": 0.03138147637575498, + "acc_norm": 0.32286995515695066, + "acc_norm_stderr": 0.03138147637575498 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.04093329229834278, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.04093329229834278 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.048580835742663434, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.048580835742663434 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.029597329730978082, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.029597329730978082 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.29743589743589743, + "acc_stderr": 0.023177408131465942, + "acc_norm": 0.29743589743589743, + "acc_norm_stderr": 0.023177408131465942 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.0395783547198098, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.0395783547198098 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.031618563353586114, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.031618563353586114 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.02573654274559452, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.02573654274559452 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.02704685763071668, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.02704685763071668 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.17272727272727273, + "acc_stderr": 0.0362069183392922, + "acc_norm": 0.17272727272727273, + "acc_norm_stderr": 0.0362069183392922 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.024720713193952165, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.024720713193952165 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804726, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804726 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31840796019900497, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.31840796019900497, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.03414014007044036, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.03414014007044036 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184763, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184763 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, 
+ "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.20679012345679013, + "acc_stderr": 0.022535006705942818, + "acc_norm": 0.20679012345679013, + "acc_norm_stderr": 0.022535006705942818 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.0314102478056532, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.0314102478056532 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281337, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281337 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26238532110091745, + "acc_stderr": 0.018861885021534734, + "acc_norm": 0.26238532110091745, + "acc_norm_stderr": 0.018861885021534734 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604674, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604674 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.03896878985070415, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.03896878985070415 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.03611780560284898, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.03611780560284898 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.016992723465466233, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.016992723465466233 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.02646903681859063, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.02646903681859063 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953185, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966339, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966339 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1801470588235294, + 
"acc_stderr": 0.02334516361654486, + "acc_norm": 0.1801470588235294, + "acc_norm_stderr": 0.02334516361654486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249783, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24050632911392406, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.24050632911392406, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.010865436690780269, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.010865436690780269 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522517, + "mc2": 0.5207557813698324, + "mc2_stderr": 0.01656184952031738 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2089728453364817, + "acc_stderr": 0.013978334944170286, + "acc_norm": 0.4757969303423849, + "acc_norm_stderr": 0.01717020246652075 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "quantumaikr/KoreanLM-1.5b", + "model_sha": "d26b261612f7cf8358309921bc387b754596355f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/quantumaikr/KoreanLM-3B/result_2023-10-18 16:24:22.json b/quantumaikr/KoreanLM-3B/result_2023-10-18 16:24:22.json new file mode 100644 index 0000000000000000000000000000000000000000..f399854d9bdc2aa16706c29a43471c84fb7ec1f3 --- /dev/null +++ b/quantumaikr/KoreanLM-3B/result_2023-10-18 16:24:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19880546075085323, + "acc_stderr": 0.011662850198175536, + "acc_norm": 0.24488054607508533, + "acc_norm_stderr": 0.012566273985131356 + }, + "harness|ko_hellaswag|10": { + "acc": 0.27106154152559253, + "acc_stderr": 0.004435993492583864, + "acc_norm": 0.27753435570603463, + "acc_norm_stderr": 0.004468672138910928 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824563, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824563 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24265644955300128, + "acc_stderr": 0.015329888940899873, + "acc_norm": 0.24265644955300128, + "acc_norm_stderr": 0.015329888940899873 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386694, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386694 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.18971061093247588, + "acc_stderr": 0.022268196258783218, + "acc_norm": 0.18971061093247588, + "acc_norm_stderr": 0.022268196258783218 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 
0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.0274796030105388, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.0274796030105388 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.03095663632856655, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.03095663632856655 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33076923076923076, + "acc_stderr": 0.023854795680971142, + "acc_norm": 0.33076923076923076, + "acc_norm_stderr": 0.023854795680971142 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23870967741935484, + "acc_stderr": 0.024251071262208834, + "acc_norm": 0.23870967741935484, + "acc_norm_stderr": 0.024251071262208834 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.20754716981132076, + "acc_stderr": 0.024959918028911274, + "acc_norm": 0.20754716981132076, + "acc_norm_stderr": 0.024959918028911274 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2085889570552147, + "acc_stderr": 0.03192193448934725, + "acc_norm": 0.2085889570552147, + "acc_norm_stderr": 0.03192193448934725 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.20987654320987653, + "acc_stderr": 0.02265834408598137, + "acc_norm": 0.20987654320987653, + "acc_norm_stderr": 0.02265834408598137 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818115, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818115 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21241830065359477, + "acc_stderr": 0.023420375478296125, + "acc_norm": 0.21241830065359477, + "acc_norm_stderr": 0.023420375478296125 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.01755581809132227, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.01755581809132227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349843003, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349843003 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 
0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.02671143055553841, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.02671143055553841 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24050632911392406, + "acc_stderr": 0.027820781981149675, + "acc_norm": 0.24050632911392406, + "acc_norm_stderr": 0.027820781981149675 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522519, + "mc2": 0.4822371041865183, + "mc2_stderr": 0.01604938696224229 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605971, + "acc_norm": 0.4769775678866588, + "acc_norm_stderr": 0.01717212154672764 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "quantumaikr/KoreanLM-3B", + "model_sha": "f49217779eea253aa3e7dd4645eedfd496fa9e0b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/quantumaikr/KoreanLM/result_2023-10-18 16:23:36.json b/quantumaikr/KoreanLM/result_2023-10-18 16:23:36.json new file mode 100644 index 0000000000000000000000000000000000000000..a1e42c732272441b1cb1ba50a459503091697341 --- /dev/null +++ b/quantumaikr/KoreanLM/result_2023-10-18 16:23:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2619453924914676, + "acc_stderr": 0.012849054826858117, + "acc_norm": 0.30119453924914674, + "acc_norm_stderr": 0.01340674176784762 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3179645488946425, + "acc_stderr": 0.004647338877642185, + "acc_norm": 0.3739294961163115, + "acc_norm_stderr": 0.004828564090620291 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2988505747126437, + "acc_stderr": 0.016369256815093127, + "acc_norm": 0.2988505747126437, + "acc_norm_stderr": 0.016369256815093127 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2604501607717042, + "acc_stderr": 0.02492672322484555, + "acc_norm": 0.2604501607717042, + "acc_norm_stderr": 
0.02492672322484555 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462203, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.04010358942462203 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.037528339580033376, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.037528339580033376 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.03618664819936244, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936244 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136084, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136084 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02144454730156048, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02144454730156048 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.04587904741301811, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.04587904741301811 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764815, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764815 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4188034188034188, + "acc_stderr": 0.03232128912157792, + "acc_norm": 0.4188034188034188, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.02761116340239972, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.02761116340239972 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371215, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014635, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014635 
+ }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194974, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194974 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577622, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.02483605786829468, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.02483605786829468 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.018125669180861493, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.018125669180861493 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.02617390850671858, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.02617390850671858 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.033176727875331574, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.033176727875331574 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.018185218954318082, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.018185218954318082 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322716, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322716 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.024562204314142314, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.024562204314142314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.16326530612244897, + "acc_stderr": 0.023661699177098604, + "acc_norm": 0.16326530612244897, + "acc_norm_stderr": 0.023661699177098604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.031052391937584353, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.031052391937584353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771316, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145628, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145628 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015016, + "mc2": 0.42260296070190784, + "mc2_stderr": 0.015435227733476522 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791248, + "acc_norm": 0.3707201889020071, + "acc_norm_stderr": 0.0166058012892126 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "quantumaikr/KoreanLM", + "model_sha": "f4351abcdd6a933afbaffad0badf60c273e71920", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/rufjdk5480/ko-llama7b-merged/result_2023-12-04 07:04:26.json b/rufjdk5480/ko-llama7b-merged/result_2023-12-04 07:04:26.json new file mode 100644 index 0000000000000000000000000000000000000000..6d638289d08925842f996a522f6d0f49864e995c --- /dev/null +++ b/rufjdk5480/ko-llama7b-merged/result_2023-12-04 07:04:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2645051194539249, + "acc_stderr": 0.01288927294931337, + "acc_norm": 0.30887372013651876, + "acc_norm_stderr": 0.013501770929344 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33479386576379205, + "acc_stderr": 0.004709538864916327, + "acc_norm": 0.4118701453893647, + "acc_norm_stderr": 0.0049116598845061485 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36270753512132825, + "acc_stderr": 0.0171927086746023, + "acc_norm": 0.36270753512132825, + "acc_norm_stderr": 0.0171927086746023 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { 
+ "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.35691318327974275, + "acc_stderr": 0.027210420375934012, + "acc_norm": 0.35691318327974275, + "acc_norm_stderr": 0.027210420375934012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.031493846709941306, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.031493846709941306 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.32323232323232326, + "acc_stderr": 0.03332299921070645, + "acc_norm": 0.32323232323232326, + "acc_norm_stderr": 0.03332299921070645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.039609335494512087, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.039609335494512087 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3067226890756303, + "acc_stderr": 0.029953823891887044, + "acc_norm": 0.3067226890756303, + "acc_norm_stderr": 0.029953823891887044 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.29743589743589743, + "acc_stderr": 0.023177408131465942, + "acc_norm": 0.29743589743589743, + "acc_norm_stderr": 0.023177408131465942 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.02637756702864586, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.02637756702864586 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4829059829059829, + "acc_stderr": 0.032736940493481824, + "acc_norm": 0.4829059829059829, + "acc_norm_stderr": 0.032736940493481824 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670716, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670716 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.2777777777777778, + "acc_stderr": 0.027309140588230186, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230186 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.034791855725996614, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.034791855725996614 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4577114427860697, + "acc_stderr": 0.03522865864099597, + "acc_norm": 0.4577114427860697, + "acc_norm_stderr": 0.03522865864099597 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.02241804289111394, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.02241804289111394 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554859, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554859 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3487654320987654, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.3487654320987654, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.034234651001042844, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.034234651001042844 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26788990825688075, + "acc_stderr": 0.01898746225797865, + "acc_norm": 0.26788990825688075, + "acc_norm_stderr": 0.01898746225797865 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.045190820213197716, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.045190820213197716 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 
0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.018550634502952964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.018550634502952964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101355, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101355 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.026040662474201278, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.026040662474201278 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235922, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235922 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.030587326294702365, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.030587326294702365 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26988265971316816, + "acc_stderr": 0.011337381084250394, + "acc_norm": 0.26988265971316816, + "acc_norm_stderr": 0.011337381084250394 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.01570210709062788, + "mc2": 0.46317433331488955, + "mc2_stderr": 0.015481757792093615 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605971, + "acc_norm": 0.345926800472255, + "acc_norm_stderr": 0.01635385341434757 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "rufjdk5480/ko-llama7b-merged", + "model_sha": "210250b684221c12bf9593c72f94e6b6ce5e12e7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sanghwa-na/llama2-13b.kor.v1/result_2023-10-30 07:41:35.json b/sanghwa-na/llama2-13b.kor.v1/result_2023-10-30 07:41:35.json new file mode 100644 index 0000000000000000000000000000000000000000..f212d5b6a323c61310e324a08bad8dd040783bc9 --- /dev/null +++ b/sanghwa-na/llama2-13b.kor.v1/result_2023-10-30 07:41:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.01365998089427738, + "acc_norm": 0.3796928327645051, + "acc_norm_stderr": 0.014182119866974872 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36168094005178253, + "acc_stderr": 0.004795051037917731, + "acc_norm": 0.4652459669388568, + "acc_norm_stderr": 0.004977713073899333 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.0381107966983353, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.0381107966983353 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + 
"acc_stderr": 0.01787469866749135, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.01787469866749135 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.036643147772880864, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.036643147772880864 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336019, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.02825666072336019 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4494949494949495, + "acc_stderr": 0.03544132491947969, + "acc_norm": 0.4494949494949495, + "acc_norm_stderr": 0.03544132491947969 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.03128217706368461, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.03128217706368461 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.024756000382130945, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.024756000382130945 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347354, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347354 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03255326307272487, + "acc_norm": 0.5555555555555556, + 
"acc_norm_stderr": 0.03255326307272487 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.36981132075471695, + "acc_stderr": 0.029711421880107922, + "acc_norm": 0.36981132075471695, + "acc_norm_stderr": 0.029711421880107922 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02712511551316686, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02712511551316686 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41651376146788993, + "acc_stderr": 0.021136376504030878, + "acc_norm": 0.41651376146788993, + "acc_norm_stderr": 0.021136376504030878 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.02833239748366427, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.02833239748366427 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092487, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092487 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.042032772914677614, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.042032772914677614 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025445, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146291, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146291 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.029923100563683913, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.029923100563683913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4472573839662447, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.4472573839662447, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30247718383311606, + "acc_stderr": 0.011731524234165703, + "acc_norm": 0.30247718383311606, + "acc_norm_stderr": 0.011731524234165703 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.033540924375915195, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.033540924375915195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.4489814958425013, + "mc2_stderr": 0.015305629142879413 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45336481700118064, + "acc_stderr": 0.01711541822522687, + "acc_norm": 0.564344746162928, + "acc_norm_stderr": 0.017047415229476316 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sanghwa-na/llama2-13b.kor.v1", + "model_sha": "c9b4aa22c6fe71a0e0deb30dc58dc40ad83637db", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sanghwa-na/llama2-13b.kor.v2/result_2023-11-02 10:05:17.json b/sanghwa-na/llama2-13b.kor.v2/result_2023-11-02 10:05:17.json new file mode 100644 index 0000000000000000000000000000000000000000..152c2b928d1d31849e29a75611d9a7ccff9ae540 --- /dev/null +++ b/sanghwa-na/llama2-13b.kor.v2/result_2023-11-02 10:05:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938215, + "acc_norm": 0.44197952218430037, + "acc_norm_stderr": 0.014512682523128347 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41206930890260907, + "acc_stderr": 0.004912015369160081, + "acc_norm": 0.5485958972316272, + "acc_norm_stderr": 
0.004966158142645405 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.017875748840242407, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.017875748840242407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.031068985963122155, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.031068985963122155 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.033408675019233246, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.033408675019233246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.37438423645320196, + "acc_stderr": 0.03405155380561953, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561953 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.030463656747340265, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.030463656747340265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.03522865864099598, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.03522865864099598 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.036812296333943194, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.036812296333943194 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361823, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361823 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.03874102859818081, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.03874102859818081 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194045, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281335, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281335 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.44587155963302755, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.44587155963302755, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249032, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249032 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.01924978569171721, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.01924978569171721 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.026679252270103124, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.026679252270103124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5021097046413502, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.5021097046413502, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3089960886571056, + "acc_stderr": 0.01180172977723924, + "acc_norm": 0.3089960886571056, + "acc_norm_stderr": 0.01180172977723924 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.26805385556915545, + "mc1_stderr": 0.015506204722834562, + "mc2": 0.4302371643016868, + "mc2_stderr": 0.015064732208462774 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.016900062879427115, + "acc_norm": 0.5312868949232585, + "acc_norm_stderr": 0.017156666859785476 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sanghwa-na/llama2-13b.kor.v2", + "model_sha": "677c1badec4e73f7c98b8a8d2bab178a2330a330", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sanghwa-na/llama2-13b.kor/result_2023-10-27 14:03:39.json b/sanghwa-na/llama2-13b.kor/result_2023-10-27 14:03:39.json new file mode 100644 index 
0000000000000000000000000000000000000000..cca05930eca5998e31834209e645449e85c9d8a1 --- /dev/null +++ b/sanghwa-na/llama2-13b.kor/result_2023-10-27 14:03:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35665529010238906, + "acc_stderr": 0.013998056902620196, + "acc_norm": 0.3873720136518771, + "acc_norm_stderr": 0.014235872487909874 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3745269866560446, + "acc_stderr": 0.004830113797327052, + "acc_norm": 0.48645688109938257, + "acc_norm_stderr": 0.004987950663406551 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 0.017879248970584398, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.017879248970584398 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540632, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540632 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.36981132075471695, + "acc_stderr": 0.029711421880107926, + "acc_norm": 0.36981132075471695, + "acc_norm_stderr": 0.029711421880107926 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.47761194029850745, + "acc_stderr": 0.035319879302087305, + "acc_norm": 0.47761194029850745, + "acc_norm_stderr": 0.035319879302087305 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.023330654054535896, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.023330654054535896 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.03814269893261837, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.03814269893261837 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + 
"acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860807, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4055045871559633, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.4055045871559633, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092484, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092484 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23161764705882354, + "acc_stderr": 0.025626533803777565, + "acc_norm": 0.23161764705882354, + "acc_norm_stderr": 0.025626533803777565 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3346938775510204, + "acc_stderr": 0.030209235226242307, + "acc_norm": 0.3346938775510204, + "acc_norm_stderr": 0.030209235226242307 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 
0.01180859826250332, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.01180859826250332 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904718, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904718 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.42845182361852463, + "mc2_stderr": 0.015173261969256705 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43565525383707204, + "acc_stderr": 0.017047415229476327, + "acc_norm": 0.577331759149941, + "acc_norm_stderr": 0.016983506079577607 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + 
"config_general": { + "model_name": "sanghwa-na/llama2-13b.kor", + "model_sha": "1e0dfa0e076117cf22754ff55a94bac1f72672ca", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sanghwa-na/mistrallite.kor/result_2023-10-29 10:51:09.json b/sanghwa-na/mistrallite.kor/result_2023-10-29 10:51:09.json new file mode 100644 index 0000000000000000000000000000000000000000..953ca82ac316c28a654ec3b20e979461b9a26f23 --- /dev/null +++ b/sanghwa-na/mistrallite.kor/result_2023-10-29 10:51:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3054607508532423, + "acc_stderr": 0.0134600804780025, + "acc_norm": 0.34044368600682595, + "acc_norm_stderr": 0.013847460518892976 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3329018123879705, + "acc_stderr": 0.004702886273189413, + "acc_norm": 0.4133638717386975, + "acc_norm_stderr": 0.004914305798575699 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.03660298834049162, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.03660298834049162 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.421455938697318, + "acc_stderr": 0.017657976412654857, + "acc_norm": 0.421455938697318, + "acc_norm_stderr": 0.017657976412654857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3991031390134529, + "acc_stderr": 0.03286745312567961, + "acc_norm": 0.3991031390134529, + "acc_norm_stderr": 0.03286745312567961 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.040287315329475604, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.040287315329475604 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 
0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347357, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347357 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557838, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.030052580579557838 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.0472457740573157, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.0472457740573157 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4925373134328358, + "acc_stderr": 0.035351400842767194, + "acc_norm": 0.4925373134328358, + "acc_norm_stderr": 0.035351400842767194 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.033205564430855705, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.033205564430855705 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983063, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983063 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.025816756791584197, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 
0.025816756791584197 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38860103626943004, + "acc_stderr": 0.03517739796373131, + "acc_norm": 0.38860103626943004, + "acc_norm_stderr": 0.03517739796373131 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3614678899082569, + "acc_stderr": 0.020598082009937374, + "acc_norm": 0.3614678899082569, + "acc_norm_stderr": 0.020598082009937374 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.02795604616542451, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.02795604616542451 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252603, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252603 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.01869085027359529, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.01869085027359529 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.02981263070156974, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.02981263070156974 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4050632911392405, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.4050632911392405, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803529, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803529 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674119, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674119 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384308, + "mc2": 0.45952110519785677, + "mc2_stderr": 0.01589536011034475 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2833530106257379, + "acc_stderr": 0.015492852084597232, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.01701984753597222 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sanghwa-na/mistrallite.kor", + "model_sha": "7fa22118f6d96ee3873002e4ec1ccdc0dd53d976", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/seungduk/KoSOLAR-10.7B-v0.1/result_2023-12-28 09:04:26.json b/seungduk/KoSOLAR-10.7B-v0.1/result_2023-12-28 09:04:26.json new file mode 100644 index 0000000000000000000000000000000000000000..aa2e03065b1eb116e3ebf882d2bd6ce02fbf1a9e --- /dev/null +++ b/seungduk/KoSOLAR-10.7B-v0.1/result_2023-12-28 09:04:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4180887372013652, + "acc_stderr": 0.01441398839699608, + "acc_norm": 0.4718430034129693, + "acc_norm_stderr": 0.014588204105102202 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4325831507667795, + "acc_stderr": 0.004944215937021397, + "acc_norm": 0.595399322844055, + "acc_norm_stderr": 0.00489811511097503 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6023391812865497, + "acc_stderr": 0.03753638955761691, + "acc_norm": 0.6023391812865497, + "acc_norm_stderr": 0.03753638955761691 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6334610472541508, + "acc_stderr": 0.017231244626797027, + "acc_norm": 0.6334610472541508, + "acc_norm_stderr": 0.017231244626797027 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542125, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542125 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789959, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789959 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5562700964630225, + "acc_stderr": 0.028217683556652308, + "acc_norm": 0.5562700964630225, + "acc_norm_stderr": 0.028217683556652308 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6793893129770993, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.6793893129770993, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.702020202020202, + "acc_stderr": 0.032586303838365555, + "acc_norm": 0.702020202020202, + "acc_norm_stderr": 0.032586303838365555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.025294608023986462, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.025294608023986462 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.028129112709165904, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 0.028129112709165904 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700914, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700914 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.03070948699255655, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.03070948699255655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.047245774057315726, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.047245774057315726 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.032357437893550445, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.032357437893550445 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273957, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.02516798233389415, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.02516798233389415 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.026756255129663765, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.026756255129663765 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.027513747284379428, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.027513747284379428 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6580310880829016, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.6580310880829016, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6458715596330276, + "acc_stderr": 0.02050472901382911, + "acc_norm": 0.6458715596330276, + "acc_norm_stderr": 0.02050472901382911 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49836601307189543, + "acc_stderr": 0.020227726838150117, + "acc_norm": 0.49836601307189543, + "acc_norm_stderr": 0.020227726838150117 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.02872386385328128, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.02872386385328128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331152, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331152 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 
0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.03164209487942942, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.03164209487942942 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036423, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036423 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39374185136897, + "acc_stderr": 0.012478532272564432, + "acc_norm": 0.39374185136897, + "acc_norm_stderr": 0.012478532272564432 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088298, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088298 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253597, + "mc2": 0.4183988499603794, + "mc2_stderr": 0.015219858524944204 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5808736717827627, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.016738130760321747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "seungduk/KoSOLAR-10.7B-v0.1", + "model_sha": "65c534a804afe9e7b207dc3ada10b1b08d5deda0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-ck-sft/result_2023-12-03 02:59:41.json b/shangrilar/llama-2-ko-7b-ck-sft/result_2023-12-03 02:59:41.json new file mode 100644 index 0000000000000000000000000000000000000000..6d85bf3e0911355286bba558e83574560bc9ec7a --- /dev/null +++ b/shangrilar/llama-2-ko-7b-ck-sft/result_2023-12-03 02:59:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30887372013651876, + "acc_stderr": 0.013501770929344003, + "acc_norm": 0.3651877133105802, + "acc_norm_stderr": 0.0140702655192688 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3851822346146186, + "acc_stderr": 0.0048564379557198565, + "acc_norm": 0.5034853614817766, + "acc_norm_stderr": 0.004989660180792183 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.0352821125824523, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.0352821125824523 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.016997123346113432, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.016997123346113432 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380052, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380052 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.034106466140718564, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.034106466140718564 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36977491961414793, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.36977491961414793, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 
0.03273766725459156, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886838, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886838 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.258974358974359, + "acc_stderr": 0.022211106810061672, + "acc_norm": 0.258974358974359, + "acc_norm_stderr": 0.022211106810061672 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3387096774193548, + "acc_stderr": 0.026923446059302844, + "acc_norm": 0.3387096774193548, + "acc_norm_stderr": 0.026923446059302844 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04265792110940588, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940588 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.031343283582089536, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.031343283582089536 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 
0.03126511206173044, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173044 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.024405173935783238, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.024405173935783238 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.025630824975621344, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.025630824975621344 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3137614678899083, + "acc_stderr": 0.019894723341469127, + "acc_norm": 0.3137614678899083, + "acc_norm_stderr": 0.019894723341469127 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.018120224251484587, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484587 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.02668456434046099, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.02668456434046099 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 
0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335314, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3080168776371308, + "acc_stderr": 0.0300523893356057, + "acc_norm": 0.3080168776371308, + "acc_norm_stderr": 0.0300523893356057 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27509778357235987, + "acc_stderr": 0.01140544362099692, + "acc_norm": 0.27509778357235987, + "acc_norm_stderr": 0.01140544362099692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923393, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2215422276621787, + "mc1_stderr": 0.014537867601301142, + "mc2": 0.36630728160236614, + "mc2_stderr": 0.014877510069800457 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30460448642266824, + "acc_stderr": 0.015823367273129392, + "acc_norm": 0.3730814639905549, + "acc_norm_stderr": 0.016627318275137443 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-ck-sft", + "model_sha": "5320a852b356e0d909c7b47330714de4f11c43fb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-ck-sfte/result_2023-12-03 03:00:06.json b/shangrilar/llama-2-ko-7b-ck-sfte/result_2023-12-03 03:00:06.json new file mode 100644 index 0000000000000000000000000000000000000000..81dbb6dbdb9d3feeebd2b35dcaf1c60087a6ec7e --- /dev/null +++ b/shangrilar/llama-2-ko-7b-ck-sfte/result_2023-12-03 03:00:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3037542662116041, + "acc_stderr": 0.013438909184778757, + "acc_norm": 0.35238907849829354, + "acc_norm_stderr": 0.013960142600598678 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38418641704839673, + "acc_stderr": 0.004854082479916908, + "acc_norm": 0.49900418243377814, + "acc_norm_stderr": 0.004989771515176699 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579215, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3550446998722861, + "acc_stderr": 0.017112085772772984, + "acc_norm": 0.3550446998722861, + "acc_norm_stderr": 0.017112085772772984 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 
0.030017554471880557, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880557 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233137, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233137 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3504823151125402, + "acc_stderr": 0.027098652621301747, + "acc_norm": 0.3504823151125402, + "acc_norm_stderr": 0.027098652621301747 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.02804796722417689, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.02804796722417689 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.02248938979365483, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.02248938979365483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.02606936229533513, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02606936229533513 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.405982905982906, + "acc_stderr": 0.03217180182641087, + "acc_norm": 0.405982905982906, + "acc_norm_stderr": 0.03217180182641087 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.044612721759105065, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.044612721759105065 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.025497532639609556, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 
0.025497532639609556 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696545, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696545 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.03187187537919797, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919797 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173044, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173044 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415412, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415412 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.02481835012943659, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.02481835012943659 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.025842248700902164, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.025842248700902164 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.03074890536390989, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.03074890536390989 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28990825688073396, + "acc_stderr": 0.0194530666092016, + "acc_norm": 0.28990825688073396, + "acc_norm_stderr": 0.0194530666092016 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.02617390850671858, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.02617390850671858 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.03611780560284898, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.03611780560284898 + }, + "harness|ko_mmlu_professional_psychology|5": 
{ + "acc": 0.30718954248366015, + "acc_stderr": 0.018663359671463677, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.018663359671463677 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.044939490686135404, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.044939490686135404 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.028765111718046944, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.028765111718046944 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144693, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144693 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.1836734693877551, + "acc_stderr": 0.024789071332007622, + "acc_norm": 0.1836734693877551, + "acc_norm_stderr": 0.024789071332007622 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.28270042194092826, + "acc_stderr": 0.029312814153955934, + "acc_norm": 0.28270042194092826, + "acc_norm_stderr": 0.029312814153955934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2646675358539765, + "acc_stderr": 0.011267332992845528, + "acc_norm": 0.2646675358539765, + "acc_norm_stderr": 0.011267332992845528 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869326 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237033, + "mc2": 0.3920379268511032, + "mc2_stderr": 0.014920139415816195 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2975206611570248, + "acc_stderr": 0.015717742205089914, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.01653869160332771 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-ck-sfte", + "model_sha": "6efe784c199bd0f1a4136fa1803d0b77a1f4f23a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-ck-sftem/result_2023-12-03 03:01:32.json b/shangrilar/llama-2-ko-7b-ck-sftem/result_2023-12-03 03:01:32.json new file mode 100644 index 0000000000000000000000000000000000000000..9d6eada4f91fda46424eb108c7678e5fabe96997 --- /dev/null +++ b/shangrilar/llama-2-ko-7b-ck-sftem/result_2023-12-03 03:01:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29948805460750855, + "acc_stderr": 0.013385021637313572, + "acc_norm": 0.3677474402730375, + "acc_norm_stderr": 0.014090995618168478 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38627763393746267, + "acc_stderr": 0.0048590041846946095, + "acc_norm": 0.5016928898625772, + "acc_norm_stderr": 0.004989752811173406 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.351213282247765, + "acc_stderr": 0.017069982051499427, + "acc_norm": 0.351213282247765, + 
"acc_norm_stderr": 0.017069982051499427 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501116, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501116 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.029644006577009618, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.029644006577009618 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648022, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648022 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.032586303838365555, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.032586303838365555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863814, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863814 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204426, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204426 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617722, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617722 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.025988500792411887, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.025988500792411887 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3803418803418803, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.3803418803418803, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + 
"acc": 0.2641509433962264, + "acc_stderr": 0.027134291628741716, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.027134291628741716 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.27860696517412936, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.27860696517412936, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.024027745155265016, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.024027745155265016 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.03487825168497892, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.03487825168497892 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33024691358024694, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.33024691358024694, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700293, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700293 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28256880733944956, + "acc_stderr": 0.019304243497707152, + "acc_norm": 0.28256880733944956, + "acc_norm_stderr": 0.019304243497707152 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.0339549002085611, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.0339549002085611 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.02656892101545715, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.02656892101545715 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + 
"acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.018185218954318082, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.018185218954318082 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460987, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460987 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.02649191472735513, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.02649191472735513 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1948529411764706, + "acc_stderr": 0.024060599423487424, + "acc_norm": 0.1948529411764706, + "acc_norm_stderr": 0.024060599423487424 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2, + "acc_stderr": 0.02560737598657916, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02560737598657916 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.030274974880218977, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.030274974880218977 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.01122252816977131, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.01122252816977131 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.03149328104507957, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.03149328104507957 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511785, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511785 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.01497482727975233, + "mc2": 0.3838342466674412, + "mc2_stderr": 0.0149406355520632 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2727272727272727, + "acc_stderr": 0.015311853110300352, + "acc_norm": 0.31641086186540734, + "acc_norm_stderr": 0.015989617951065477 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-ck-sftem", + "model_sha": "f76cbec4da7eaa0823db2a9f25a9f1e8fb305567", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-ck-sftm/result_2023-12-03 02:59:57.json b/shangrilar/llama-2-ko-7b-ck-sftm/result_2023-12-03 02:59:57.json new file mode 100644 index 0000000000000000000000000000000000000000..8dfad45d12e5f6861e691174df1991f7f848d16c --- /dev/null +++ b/shangrilar/llama-2-ko-7b-ck-sftm/result_2023-12-03 02:59:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3216723549488055, + "acc_stderr": 0.013650488084494164, + "acc_norm": 0.371160409556314, + "acc_norm_stderr": 0.014117971901142817 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3851822346146186, + "acc_stderr": 0.004856437955719859, + "acc_norm": 0.5043815972913762, + "acc_norm_stderr": 0.00498958981618023 + }, + "harness|ko_mmlu_world_religions|5": { + 
"acc": 0.30409356725146197, + "acc_stderr": 0.0352821125824523, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.0352821125824523 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.32567049808429116, + "acc_stderr": 0.016757989458549682, + "acc_norm": 0.32567049808429116, + "acc_norm_stderr": 0.016757989458549682 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.034605799075530276, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.034605799075530276 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 0.026730620728004913, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004913 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082394, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082394 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124498, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124498 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.02788682807838058, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.02788682807838058 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.02176373368417392, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.02176373368417392 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733545, + 
"acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.02556060472102289, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.02556060472102289 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3418803418803419, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.3418803418803419, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.027134291628741713, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.027134291628741713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505415, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505415 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008936, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1907514450867052, + "acc_stderr": 0.029957851329869337, + "acc_norm": 0.1907514450867052, + "acc_norm_stderr": 0.029957851329869337 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.024027745155265016, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.024027745155265016 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.025407197798890155, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.025407197798890155 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.02977866303775295, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.02977866303775295 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24954128440366974, + "acc_stderr": 0.018553897629501624, + "acc_norm": 
0.24954128440366974, + "acc_norm_stderr": 0.018553897629501624 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0252616912197295, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0252616912197295 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302871, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302871 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.20394736842105263, + "acc_stderr": 0.03279000406310049, + "acc_norm": 0.20394736842105263, + "acc_norm_stderr": 0.03279000406310049 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.01777694715752803, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.01777694715752803 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.025416428388767478, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.025416428388767478 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.026431329870789538, + "acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.026431329870789538 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.02540930195322568, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.02540930195322568 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23989569752281617, + "acc_stderr": 0.010906282617981648, + "acc_norm": 0.23989569752281617, + "acc_norm_stderr": 0.010906282617981648 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967409, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967409 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22766217870257038, + "mc1_stderr": 0.01467925503211107, + 
"mc2": 0.3687513230749265, + "mc2_stderr": 0.014956938558145557 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29161747343565525, + "acc_stderr": 0.015626276690070242, + "acc_norm": 0.3624557260920897, + "acc_norm_stderr": 0.016527131240453706 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-ck-sftm", + "model_sha": "98291ad089efcc9013a22607275f4b4589a91e7e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-ck/result_2023-12-02 13:15:15.json b/shangrilar/llama-2-ko-7b-ck/result_2023-12-02 13:15:15.json new file mode 100644 index 0000000000000000000000000000000000000000..1f182e8cf4750dd3252d7aa06c92338da195672b --- /dev/null +++ 
b/shangrilar/llama-2-ko-7b-ck/result_2023-12-02 13:15:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.01359243151906808, + "acc_norm": 0.3796928327645051, + "acc_norm_stderr": 0.014182119866974872 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38109938259310894, + "acc_stderr": 0.004846643735666549, + "acc_norm": 0.5036845249950209, + "acc_norm_stderr": 0.004989645929811448 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3563218390804598, + "acc_stderr": 0.0171258537627559, + "acc_norm": 0.3563218390804598, + "acc_norm_stderr": 0.0171258537627559 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996796, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996796 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231004, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740749, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740749 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648022, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648022 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419996, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419996 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016339, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.03464881675016339 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342863, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342863 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.022139081103971524, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.022139081103971524 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + 
"acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650743, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650743 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0317852971064275, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0317852971064275 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895991, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.02646611753895991 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.27860696517412936, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.27860696517412936, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240017, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240017 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.034370793441061344, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.034370793441061344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.024818350129436593, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.024818350129436593 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.026229649178821157, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.026229649178821157 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3761467889908257, + "acc_stderr": 0.020769231968205074, + "acc_norm": 0.3761467889908257, + "acc_norm_stderr": 0.020769231968205074 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110307, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110307 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.017740899509177795, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.017740899509177795 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03068582059661081, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03068582059661081 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27183833116036504, + "acc_stderr": 0.011363135278651411, + "acc_norm": 0.27183833116036504, + "acc_norm_stderr": 
0.011363135278651411 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373616, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373616 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731618, + "mc2": 0.3746009857468504, + "mc2_stderr": 0.01475413087944273 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2680047225501771, + "acc_stderr": 0.015227905796335145, + "acc_norm": 0.38488783943329397, + "acc_norm_stderr": 0.016728579701498658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-ck", + "model_sha": 
"de35087a933260a50d9260fd5ccf1247e43c1c5a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-sft/result_2023-12-03 07:45:17.json b/shangrilar/llama-2-ko-7b-sft/result_2023-12-03 07:45:17.json new file mode 100644 index 0000000000000000000000000000000000000000..bbb1efc28127d35b0e3cf4461333ebf85b16bfcb --- /dev/null +++ b/shangrilar/llama-2-ko-7b-sft/result_2023-12-03 07:45:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.302901023890785, + "acc_stderr": 0.013428241573185349, + "acc_norm": 0.35580204778157, + "acc_norm_stderr": 0.013990571137918762 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3829914359689305, + "acc_stderr": 0.004851227527070894, + "acc_norm": 0.5017924716191994, + "acc_norm_stderr": 0.004989749347461088 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03565079670708312, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03565079670708312 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3397190293742018, + "acc_stderr": 0.01693639411430165, + "acc_norm": 0.3397190293742018, + "acc_norm_stderr": 0.01693639411430165 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3247588424437299, + "acc_stderr": 0.026596782287697043, + "acc_norm": 0.3247588424437299, + "acc_norm_stderr": 0.026596782287697043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.040103589424622034, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.040103589424622034 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836556, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836556 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342853, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342853 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132354, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132354 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358607, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358607 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.02637756702864586, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.02637756702864586 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.028254200344438655, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.028254200344438655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959305, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959305 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.032147373020294696, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.032147373020294696 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.024405173935783234, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.024405173935783234 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32098765432098764, + "acc_stderr": 0.025976566010862744, + "acc_norm": 0.32098765432098764, + "acc_norm_stderr": 0.025976566010862744 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.031618779179354115, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.031618779179354115 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27522935779816515, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.27522935779816515, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.02664327847450875, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.02664327847450875 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.272875816993464, + "acc_stderr": 0.01802047414839358, + "acc_norm": 0.272875816993464, + "acc_norm_stderr": 0.01802047414839358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.0305467452649532, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.0305467452649532 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670736, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670736 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 
0.02721283588407315, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.02721283588407315 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0306858205966108, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0306858205966108 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27835723598435463, + "acc_stderr": 0.011446990197380985, + "acc_norm": 0.27835723598435463, + "acc_norm_stderr": 0.011446990197380985 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.031822318676475544, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.031822318676475544 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041857, + "mc2": 0.3666390551157725, + "mc2_stderr": 0.014763767483291076 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605973, + "acc_norm": 0.3577331759149941, + "acc_norm_stderr": 0.01647980893574998 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-sft", + "model_sha": "c46445b8e3d815fb4d36d7deca69343f2a7df17a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-sfte/result_2023-12-03 07:45:21.json b/shangrilar/llama-2-ko-7b-sfte/result_2023-12-03 07:45:21.json new file mode 100644 index 0000000000000000000000000000000000000000..a649da1a737f3a328e9d7160d55698678e25e02d --- /dev/null +++ b/shangrilar/llama-2-ko-7b-sfte/result_2023-12-03 07:45:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31313993174061433, + "acc_stderr": 0.013552671543623504, + "acc_norm": 0.363481228668942, + "acc_norm_stderr": 0.014056207319068285 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38348934475204144, + "acc_stderr": 0.004852420856631481, + "acc_norm": 0.5002987452698665, + "acc_norm_stderr": 0.004989780520782245 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36909323116219667, + "acc_stderr": 0.017256283109124616, + "acc_norm": 0.36909323116219667, + "acc_norm_stderr": 0.017256283109124616 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.034843315926805875, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.034843315926805875 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3665594855305466, + "acc_stderr": 0.02736807824397162, + "acc_norm": 0.3665594855305466, + "acc_norm_stderr": 0.02736807824397162 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + 
"acc_stderr": 0.03345678422756777, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.037800192304380135, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.037800192304380135 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176896, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176896 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678243, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678243 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.025988500792411894, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.025988500792411894 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360382, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360382 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + "harness|ko_mmlu_college_chemistry|5": { + 
"acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.02530525813187971, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.02530525813187971 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.31790123456790126, + "acc_stderr": 0.025910063528240865, + "acc_norm": 0.31790123456790126, + "acc_norm_stderr": 0.025910063528240865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3100917431192661, + "acc_stderr": 0.01983084968443975, + "acc_norm": 0.3100917431192661, + "acc_norm_stderr": 0.01983084968443975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.026992544339297243, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.026992544339297243 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119669, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119669 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.018463154132632824, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.018463154132632824 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.044939490686135404, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.044939490686135404 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859655, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859655 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, 
+ "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35443037974683544, + "acc_stderr": 0.03113730429718581, + "acc_norm": 0.35443037974683544, + "acc_norm_stderr": 0.03113730429718581 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2842242503259452, + "acc_stderr": 0.011519880596516076, + "acc_norm": 0.2842242503259452, + "acc_norm_stderr": 0.011519880596516076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373616, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373616 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299946, + "mc2": 0.3889465942097803, + "mc2_stderr": 0.01485269997681454 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2715466351829988, + "acc_stderr": 0.015291071117310382, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.016637917789798742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-sfte", + "model_sha": "7bc9f9eecb357e17900b9a99a579820f5b130afe", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-sftem/result_2023-12-03 07:45:31.json b/shangrilar/llama-2-ko-7b-sftem/result_2023-12-03 07:45:31.json new file mode 100644 index 0000000000000000000000000000000000000000..8f0b6081b732ca974a886ccc76b77ace2b717a23 --- /dev/null +++ b/shangrilar/llama-2-ko-7b-sftem/result_2023-12-03 07:45:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3267918088737201, + "acc_stderr": 0.01370666597558734, + "acc_norm": 0.37457337883959047, + "acc_norm_stderr": 0.014144193471893447 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38627763393746267, + "acc_stderr": 0.00485900418469461, + "acc_norm": 0.4965146385182235, + "acc_norm_stderr": 0.004989660180792161 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393162, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393162 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.35759897828863346, + "acc_stderr": 0.017139488998803288, + "acc_norm": 0.35759897828863346, + "acc_norm_stderr": 0.017139488998803288 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.03013590647851756, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 0.03013590647851756 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 0.026730620728004913, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004913 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + 
"acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.02820554503327772, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.02820554503327772 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.022139081103971524, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.022139081103971524 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233484, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233484 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.02637756702864586, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.02637756702864586 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.028637235639800925, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.028637235639800925 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302505, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302505 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766128, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766128 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119996, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119996 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935556, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935556 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173043, + "acc_norm": 0.2138728323699422, + 
"acc_norm_stderr": 0.03126511206173043 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184756, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184756 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.024476994076247326, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.024476994076247326 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3395061728395062, + "acc_stderr": 0.026348564412011635, + "acc_norm": 0.3395061728395062, + "acc_norm_stderr": 0.026348564412011635 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.0314102478056532, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.0314102478056532 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3155963302752294, + "acc_stderr": 0.019926117513869666, + "acc_norm": 0.3155963302752294, + "acc_norm_stderr": 0.019926117513869666 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.026716118380156847, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.026716118380156847 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810536, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810536 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.02689170942834396, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.02689170942834396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.026491914727355143, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.026491914727355143 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.027257202606114944, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.027257202606114944 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.02768297952296023, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.02768297952296023 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31223628691983124, + "acc_stderr": 0.030165137867847008, + "acc_norm": 0.31223628691983124, + "acc_norm_stderr": 0.030165137867847008 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2848761408083442, + "acc_stderr": 0.011527830846368999, + "acc_norm": 0.2848761408083442, + "acc_norm_stderr": 0.011527830846368999 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373616, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373616 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.01476194517486267, + "mc2": 0.3765048864381823, + "mc2_stderr": 0.014810224803999636 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.269185360094451, + "acc_stderr": 0.015249098024144526, + "acc_norm": 0.35182998819362454, + "acc_norm_stderr": 0.016418206451218054 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-sftem", + "model_sha": "4bd1271926402613fc82bf548033028b4467ccbe", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama2-ko-7b-kullm-base/result_2023-10-03 23:40:33.json b/shangrilar/llama2-ko-7b-kullm-base/result_2023-10-03 23:40:33.json new file mode 100644 index 0000000000000000000000000000000000000000..c20d9cff6f0760569ec2e701faf093db8a1a2ef5 --- /dev/null +++ b/shangrilar/llama2-ko-7b-kullm-base/result_2023-10-03 23:40:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349814 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38309101772555265, + "acc_stderr": 0.004851466623601449, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.016905207420803554, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.016905207420803554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.030017554471880557, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880557 
+ }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.40404040404040403, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.40404040404040403, + "acc_norm_stderr": 0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378949, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378949 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.030283995525884396, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.030283995525884396 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.02255655101013235, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.02255655101013235 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671746, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671746 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36752136752136755, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.36752136752136755, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505416, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505416 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712156, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712156 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27461139896373055, + "acc_stderr": 0.03221024508041153, + "acc_norm": 0.27461139896373055, + "acc_norm_stderr": 0.03221024508041153 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3798165137614679, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.3798165137614679, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159614, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.027582811415159614 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.2777777777777778, + "acc_stderr": 0.018120224251484577, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484577 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936484, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936484 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301833, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.031219569445301833 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733096, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733096 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.3706017104903605, + "mc2_stderr": 0.014735026291520032 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22077922077922077, + "acc_stderr": 0.01426015280354004, + "acc_norm": 0.30932703659976385, + "acc_norm_stderr": 0.015891320505520893 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama2-ko-7b-kullm-base", + "model_sha": "b7db1fa5f45f178d4e98ac52ece14064ded1b7c0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-7b-ko-v1/result_2023-12-27 04:37:48.json b/shleeeee/mistral-7b-ko-v1/result_2023-12-27 04:37:48.json new file mode 100644 index 0000000000000000000000000000000000000000..1740e0b4c2ea1e693cea657509338a4a7e12b9dd --- /dev/null +++ b/shleeeee/mistral-7b-ko-v1/result_2023-12-27 04:37:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30716723549488056, + "acc_stderr": 0.013481034054980945, + "acc_norm": 0.3395904436860068, + "acc_norm_stderr": 0.01383903976282016 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3599880501892053, + "acc_stderr": 0.0047901553709934494, + "acc_norm": 0.4552877912766381, + "acc_norm_stderr": 0.004969790407117545 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.017570705239256537, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 
0.017570705239256537 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596241, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596241 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.028043399858210635, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.028043399858210635 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.033141902221106564, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.033141902221106564 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768362, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768362 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097413, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097413 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.02743086657997347, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.02743086657997347 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.02974504857267407, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.02974504857267407 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + 
"acc": 0.41132075471698115, + "acc_stderr": 0.03028500925900981, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.03028500925900981 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.035281314729336065, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307695, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307695 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379424, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379424 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3908256880733945, + "acc_stderr": 0.02092005834611107, + "acc_norm": 0.3908256880733945, + "acc_norm_stderr": 0.02092005834611107 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883034, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883034 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 
0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536671, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536671 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.01924978569171721, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.01924978569171721 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764377, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764377 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3016759776536313, + "acc_stderr": 0.015350767572220285, + "acc_norm": 0.3016759776536313, + "acc_norm_stderr": 0.015350767572220285 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928006, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928006 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.03106721126287249, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.03106721126287249 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002574, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002574 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.4682545459171819, + "mc2_stderr": 0.015428736721872028 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42621015348288077, + "acc_stderr": 0.017002122609489263, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.017186891286894074 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-7b-ko-v1", + "model_sha": "7fade3acf30fa50ff8ae5e11f85fdb6abad37f0b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-7b-wiki/result_2023-11-28 13:22:58.json b/shleeeee/mistral-7b-wiki/result_2023-11-28 13:22:58.json new file mode 100644 index 0000000000000000000000000000000000000000..c1b027408930db12b200ef2f2efd99a08cc261e8 --- /dev/null +++ b/shleeeee/mistral-7b-wiki/result_2023-11-28 13:22:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32764505119453924, + "acc_stderr": 0.01371584794071934, + "acc_norm": 0.363481228668942, + "acc_norm_stderr": 0.014056207319068283 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36516630153355906, + "acc_stderr": 0.00480492760877313, + "acc_norm": 0.4706233817964549, + "acc_norm_stderr": 0.004981161746388225 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + 
"acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + 
"acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.02832774309156106, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.02832774309156106 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.03035152732334493, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.03035152732334493 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596426, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596426 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.02690290045866664, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.02690290045866664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668787, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668787 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.021432956203453316, + "acc_norm": 0.5100917431192661, + 
"acc_norm_stderr": 0.021432956203453316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094604, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094604 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.32737430167597764, + "acc_stderr": 0.015694238967737386, + "acc_norm": 0.32737430167597764, + "acc_norm_stderr": 0.015694238967737386 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225418, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225418 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3246414602346806, + "acc_stderr": 0.011959089388530027, + "acc_norm": 0.3246414602346806, + "acc_norm_stderr": 0.011959089388530027 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.0154610276272536, + "mc2": 
0.4510146040568402, + "mc2_stderr": 0.015548731962691761 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.01715916359017022, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292648 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-7b-wiki", + "model_sha": "2f4fcb2ee0756dd46308e60f0a0791caa9c71b75", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-7b-tech/result_2023-11-29 15:45:35.json b/shleeeee/mistral-ko-7b-tech/result_2023-11-29 15:45:35.json new file mode 100644 index 0000000000000000000000000000000000000000..35e03c27b31657274b97c2ee0445b5381f6fba38 --- /dev/null +++ 
b/shleeeee/mistral-ko-7b-tech/result_2023-11-29 15:45:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3199658703071672, + "acc_stderr": 0.013631345807016196, + "acc_norm": 0.3771331058020478, + "acc_norm_stderr": 0.014163366896192593 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36745668193586933, + "acc_stderr": 0.004811269975450612, + "acc_norm": 0.47829117705636326, + "acc_norm_stderr": 0.004985076094464756 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.017758800534214414, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.017758800534214414 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240627, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240627 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + 
"acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.02804098138076155, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.02804098138076155 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.02920254015343118, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.02920254015343118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228405, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228405 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425082, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425082 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 
0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142635, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142635 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924803, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924803 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611327, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611327 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053756, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3329608938547486, + "acc_stderr": 0.015761716178397563, + "acc_norm": 0.3329608938547486, + "acc_norm_stderr": 0.015761716178397563 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280065, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280065 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.012084265626344215, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.012084265626344215 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + 
"acc_stderr": 0.03393388584958404, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.03393388584958404 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398396, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398396 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237274, + "mc2": 0.43089635616262106, + "mc2_stderr": 0.015490661650732165 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4675324675324675, + "acc_stderr": 0.017154073716682865, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.017186891286894063 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-7b-tech", + "model_sha": "5d238deeb4e026361623067f5ee59a89699a4f66", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-7b-wiki-neft/result_2023-11-29 04:54:52.json b/shleeeee/mistral-ko-7b-wiki-neft/result_2023-11-29 04:54:52.json new file mode 100644 index 0000000000000000000000000000000000000000..c21cc04c0b03d93190f3a7996f0213d54019f307 --- /dev/null +++ b/shleeeee/mistral-ko-7b-wiki-neft/result_2023-11-29 04:54:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.013057169655761838, + "acc_norm": 0.3302047781569966, + "acc_norm_stderr": 0.013743085603760426 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33897629954192393, + "acc_stderr": 0.004723943549005987, + "acc_norm": 0.410973909579765, + "acc_norm_stderr": 0.004910049928688087 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.037712831076265434, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.037712831076265434 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4163473818646232, + "acc_stderr": 0.017627948030430298, + "acc_norm": 0.4163473818646232, + "acc_norm_stderr": 0.017627948030430298 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.028256660723360187, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.028256660723360187 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.03076935200822915, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.03076935200822915 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.03446897738659333, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.03446897738659333 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 
0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938145, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938145 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.027273890594300645, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.027273890594300645 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651047, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199593, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199593 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606648, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606648 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.0240268463928735, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.0240268463928735 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.02661335084026174, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.02661335084026174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3889908256880734, + "acc_stderr": 0.02090230088739286, + "acc_norm": 0.3889908256880734, + "acc_norm_stderr": 0.02090230088739286 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849726, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.019117213911495144, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.019117213911495144 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31620111731843575, + "acc_stderr": 0.015551673652172552, + "acc_norm": 0.31620111731843575, + "acc_norm_stderr": 0.015551673652172552 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396563, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396563 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163907, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163907 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45569620253164556, + "acc_stderr": 0.03241920684693335, + "acc_norm": 0.45569620253164556, + "acc_norm_stderr": 0.03241920684693335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30182529335071706, + "acc_stderr": 0.01172435051810589, + "acc_norm": 0.30182529335071706, + "acc_norm_stderr": 0.01172435051810589 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570345, + "mc2": 0.410345331144556, + "mc2_stderr": 0.015747686091796973 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36835891381345925, + "acc_stderr": 0.016583858982639074, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.017057753702160287 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 
1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-7b-wiki-neft", + "model_sha": "5e22bcb5df31050b2bca6d82b593c87b7fffe462", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-OpenOrca-2000/result_2023-12-04 13:24:07.json b/shleeeee/mistral-ko-OpenOrca-2000/result_2023-12-04 13:24:07.json new file mode 100644 index 0000000000000000000000000000000000000000..f59617995f869323c57a58cafcedb932a21623d1 --- /dev/null +++ b/shleeeee/mistral-ko-OpenOrca-2000/result_2023-12-04 13:24:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.013659980894277371, + "acc_norm": 0.3720136518771331, + "acc_norm_stderr": 0.014124597881844454 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37094204341764586, + "acc_stderr": 0.004820697457420417, + "acc_norm": 0.47769368651663013, + "acc_norm_stderr": 0.0049848133910162075 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44572158365261816, + "acc_stderr": 0.017774297282479503, + "acc_norm": 0.44572158365261816, + "acc_norm_stderr": 0.017774297282479503 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.031410821975962386, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.031410821975962386 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946208, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946208 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + 
"acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686855, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286102, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286102 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137588, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137588 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + 
"acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.040260970832965565, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.040260970832965565 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.032847388576472056, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.032847388576472056 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303673, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303673 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, 
+ "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.02993534270787775, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.02993534270787775 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3551020408163265, + "acc_stderr": 0.03063565515038764, + "acc_norm": 0.3551020408163265, + "acc_norm_stderr": 0.03063565515038764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.011952840809646561, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.011952840809646561 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.016058999026100623, + "mc2": 0.48435280097322475, + "mc2_stderr": 0.015611046017023626 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4309327036599764, + "acc_stderr": 0.017025558196043133, + "acc_norm": 0.4722550177095632, + "acc_norm_stderr": 0.017163867979456005 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-OpenOrca-2000", + "model_sha": "ca1dfe364d0d30557b5fec19ba988b3a0aecc443", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-OpenOrca-Platypus-v1/result_2023-12-07 22:05:15.json b/shleeeee/mistral-ko-OpenOrca-Platypus-v1/result_2023-12-07 22:05:15.json new file mode 100644 index 0000000000000000000000000000000000000000..a2a2716f61e24fc0ca47db29e2c9deb27c6e134a --- /dev/null +++ b/shleeeee/mistral-ko-OpenOrca-Platypus-v1/result_2023-12-07 22:05:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3319112627986348, + "acc_stderr": 0.013760988200880533, + "acc_norm": 0.3839590443686007, + "acc_norm_stderr": 0.014212444980651894 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3668591913961362, + "acc_stderr": 0.004809626723626832, + "acc_norm": 0.4765982871937861, + "acc_norm_stderr": 0.00498431320579144 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.01781824860346556, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.01781824860346556 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 
0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5798319327731093, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.5798319327731093, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.02860595370200425, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.030656748696739428, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.030656748696739428 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113115, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113115 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 
0.02487081525105709, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02487081525105709 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668784, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384486, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + "acc_stderr": 0.02143295620345332, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.02143295620345332 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142624, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142624 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.020017629214213087, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.020017629214213087 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861131, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861131 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03324708911809117, + "acc_norm": 
0.3888888888888889, + "acc_norm_stderr": 0.03324708911809117 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3128491620111732, + "acc_stderr": 0.015506892594647272, + "acc_norm": 0.3128491620111732, + "acc_norm_stderr": 0.015506892594647272 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464622, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464622 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3285528031290743, + "acc_stderr": 0.011996027247502932, + "acc_norm": 0.3285528031290743, + "acc_norm_stderr": 0.011996027247502932 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.45820405883067095, + "mc2_stderr": 0.015482841809930594 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5088547815820543, + "acc_stderr": 0.017187658199336743, + "acc_norm": 0.5655253837072018, + "acc_norm_stderr": 0.01704209862082492 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-OpenOrca-Platypus-v1", + "model_sha": "ed7028364195063e6e55255259908bd4a5e46b7c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-OpenOrca-Platypus-v2/result_2023-12-19 08:08:58.json b/shleeeee/mistral-ko-OpenOrca-Platypus-v2/result_2023-12-19 08:08:58.json new file mode 100644 index 0000000000000000000000000000000000000000..91a9c263e12c0db28129cac48c9f1bef2eb558bd --- /dev/null +++ b/shleeeee/mistral-ko-OpenOrca-Platypus-v2/result_2023-12-19 08:08:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635476, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407161 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36735710017924716, + "acc_stderr": 0.004810996652324741, + "acc_norm": 0.47739494124676357, + "acc_norm_stderr": 0.004984679359375628 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4521072796934866, + "acc_stderr": 0.017797751493865623, + "acc_norm": 0.4521072796934866, + "acc_norm_stderr": 0.017797751493865623 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + 
"acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330315, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330315 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561056, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561056 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.028120966503914394, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.028120966503914394 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 
0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.037242495958177295, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.02455229220934265, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.02455229220934265 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03942082639927213, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03942082639927213 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786692, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786692 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2916201117318436, + "acc_stderr": 0.015201032512520427, + "acc_norm": 0.2916201117318436, + "acc_norm_stderr": 0.015201032512520427 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131775, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131775 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 0.012008129938540483, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540483 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777315, + "mc2": 0.4594938797803625, + "mc2_stderr": 0.01549038818993178 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5289256198347108, + "acc_stderr": 0.017161563949916345, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972205 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-OpenOrca-Platypus-v2", + "model_sha": "b1035824a7a1e57c0b5814912599a4165dd39138", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-OpenOrca-wiki-v1/result_2023-12-05 04:52:36.json b/shleeeee/mistral-ko-OpenOrca-wiki-v1/result_2023-12-05 04:52:36.json new file mode 100644 index 0000000000000000000000000000000000000000..2153afc05b1e3e76fa9e39e0e206d7755fd3a02e --- /dev/null +++ b/shleeeee/mistral-ko-OpenOrca-wiki-v1/result_2023-12-05 04:52:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31143344709897613, + "acc_stderr": 0.013532472099850949, + "acc_norm": 0.363481228668942, + "acc_norm_stderr": 0.014056207319068283 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3685520812587134, + "acc_stderr": 0.004814261966376847, + "acc_norm": 0.47520414260107546, + "acc_norm_stderr": 0.004983641854351151 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041982, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 
0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534432, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534432 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4115755627009646, + "acc_stderr": 0.02795048149440127, + "acc_norm": 0.4115755627009646, + "acc_norm_stderr": 0.02795048149440127 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110657, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110657 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.04142313771996664, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.04142313771996664 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539743, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.027906150826041143, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.027906150826041143 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776296, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, 
+ "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.02931820364520686, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.02931820364520686 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.035351400842767194, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.035351400842767194 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717861, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717861 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.02680372058320619, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.02680372058320619 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160667, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44770642201834865, + "acc_stderr": 0.021319754962425455, + "acc_norm": 0.44770642201834865, + "acc_norm_stderr": 0.021319754962425455 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891765, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891765 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 
0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.019524316744866346, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.019524316744866346 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22905027932960895, + "acc_stderr": 0.014054314935614556, + "acc_norm": 0.22905027932960895, + "acc_norm_stderr": 0.014054314935614556 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3673469387755102, + "acc_stderr": 0.030862144921087555, + "acc_norm": 0.3673469387755102, + "acc_norm_stderr": 0.030862144921087555 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.011862561755715944, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.011862561755715944 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353383, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353383 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237043, + "mc2": 0.43990633213087843, + "mc2_stderr": 0.01551429694556166 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43919716646989376, + "acc_stderr": 0.0170627757447807, + "acc_norm": 0.4911452184179457, + "acc_norm_stderr": 0.017187658199336736 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-OpenOrca-wiki-v1", + "model_sha": "e7c8fea7112378edf396bb0753ac5fc3c20b0816", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-exo-mrc-v1/result_2023-12-11 22:03:07.json b/shleeeee/mistral-ko-exo-mrc-v1/result_2023-12-11 22:03:07.json new file mode 100644 index 0000000000000000000000000000000000000000..e34e5f7a2b647db1648c071f64eafccee3230b71 --- /dev/null +++ b/shleeeee/mistral-ko-exo-mrc-v1/result_2023-12-11 22:03:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.013621696119173302, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759086 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3632742481577375, + "acc_stderr": 0.004799599840397375, + "acc_norm": 0.4675363473411671, + "acc_norm_stderr": 0.004979252954977317 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, 
+ "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.017810403925435366, + "acc_norm": 0.4559386973180077, + "acc_norm_stderr": 0.017810403925435366 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400352, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400352 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954953, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954953 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 
0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749465, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066482, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983053, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983053 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470867, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470867 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354147, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354147 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963755, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.015131608849963755 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163906, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.01196531153657153, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.01196531153657153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.0154610276272536, + "mc2": 0.44879669499276337, + "mc2_stderr": 0.015443203581643984 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41204250295159384, + "acc_stderr": 0.016922276738528363, + "acc_norm": 0.4805194805194805, + "acc_norm_stderr": 0.01717730199234255 + } + }, + 
"versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-exo-mrc-v1", + "model_sha": "4cfebcf52a610101df1d3dad07fae8fe07c6c5b3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-exo-wiki-quiz-v1/result_2023-12-06 03:58:43.json b/shleeeee/mistral-ko-exo-wiki-quiz-v1/result_2023-12-06 03:58:43.json new file mode 100644 index 0000000000000000000000000000000000000000..c3fb25545bac9dbf6a8a5d0d058166d6b254b387 --- /dev/null +++ b/shleeeee/mistral-ko-exo-wiki-quiz-v1/result_2023-12-06 03:58:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31313993174061433, + "acc_stderr": 0.013552671543623504, + "acc_norm": 0.3626279863481229, + 
"acc_norm_stderr": 0.014049106564955012 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3604859589723163, + "acc_stderr": 0.004791601975612767, + "acc_norm": 0.46016729735112527, + "acc_norm_stderr": 0.004973922192982237 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.01781040392543537, + "acc_norm": 0.4559386973180077, + "acc_norm_stderr": 0.01781040392543537 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262973, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262973 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.02524277098712617, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.02524277098712617 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.034953345821629324, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.034953345821629324 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.02843453315268184, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.02843453315268184 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.029872577708891183, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.029872577708891183 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.0302422338008545, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.0302422338008545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.02467786284133278, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.02467786284133278 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357334, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5346820809248555, + "acc_stderr": 0.02685425792825889, + "acc_norm": 0.5346820809248555, + "acc_norm_stderr": 0.02685425792825889 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138936, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138936 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379414, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379414 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 
0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556054, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556054 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477752, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477752 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.033448873829978666, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.033448873829978666 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3463687150837989, + "acc_stderr": 0.01591354678402012, + "acc_norm": 0.3463687150837989, + "acc_norm_stderr": 0.01591354678402012 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3246414602346806, + "acc_stderr": 0.011959089388530027, + "acc_norm": 0.3246414602346806, + "acc_norm_stderr": 0.011959089388530027 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476199, + "mc2": 0.4419757231981567, + "mc2_stderr": 0.01567221917412918 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.016983506079577604, + "acc_norm": 0.4982290436835891, + "acc_norm_stderr": 0.017190246276231863 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-exo-wiki-quiz-v1", + "model_sha": "ade18612c96f02d3524e0a318caf470821f3067b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/shleeeee/mistral-ko-openorca-platypus-1epoch/result_2023-12-21 05:12:26.json b/shleeeee/mistral-ko-openorca-platypus-1epoch/result_2023-12-21 05:12:26.json new file mode 100644 index 0000000000000000000000000000000000000000..722549f6cb3c7bddca8f286f51af74988f13402b --- /dev/null +++ b/shleeeee/mistral-ko-openorca-platypus-1epoch/result_2023-12-21 05:12:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.302901023890785, + "acc_stderr": 0.013428241573185349, + "acc_norm": 0.3677474402730375, + "acc_norm_stderr": 0.014090995618168478 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3569010157339175, + "acc_stderr": 0.004781061390873917, + "acc_norm": 0.44284007169886475, + "acc_norm_stderr": 0.004957068377516513 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47509578544061304, + "acc_stderr": 0.01785777070490102, + "acc_norm": 0.47509578544061304, + "acc_norm_stderr": 0.01785777070490102 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566196 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.037242495958177295, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + 
"acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214338, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296559, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296559 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449838, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449838 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611306, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611306 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3329608938547486, + "acc_stderr": 0.01576171617839756, + "acc_norm": 0.3329608938547486, + "acc_norm_stderr": 0.01576171617839756 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.031843998738112236, + "acc_norm": 0.6033755274261603, + 
"acc_norm_stderr": 0.031843998738112236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3194263363754889, + "acc_stderr": 0.011908357176756158, + "acc_norm": 0.3194263363754889, + "acc_norm_stderr": 0.011908357176756158 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.47712757229255764, + "mc2_stderr": 0.015671925843446344 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.017190246276231853, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.017107618859549346 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-openorca-platypus-1epoch", + "model_sha": "7252aa91a9c671044b2c871ea90040e60c2fd27a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-tech-science-v1/result_2023-12-12 22:56:56.json b/shleeeee/mistral-ko-tech-science-v1/result_2023-12-12 22:56:56.json new file mode 100644 index 0000000000000000000000000000000000000000..8960d6bc6bcee71803ee7afb9d95bbce49aa77e4 --- /dev/null +++ b/shleeeee/mistral-ko-tech-science-v1/result_2023-12-12 22:56:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.318259385665529, + "acc_stderr": 0.013611993916971453, + "acc_norm": 0.3728668941979522, + "acc_norm_stderr": 0.014131176760131167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3648675562636925, + "acc_stderr": 0.004804091708812552, + "acc_norm": 0.4733120892252539, + "acc_norm_stderr": 0.00498266845211894 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45338441890166026, + "acc_stderr": 0.017802087135850294, + "acc_norm": 0.45338441890166026, + "acc_norm_stderr": 0.017802087135850294 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + 
"acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.02533900301010653, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.02533900301010653 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419871, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419871 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778657, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778657 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { 
+ "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.02758600622160773, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.02758600622160773 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225882, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225882 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786682, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786682 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.033448873829978666, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.033448873829978666 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.35195530726256985, + "acc_stderr": 0.01597266852368907, + "acc_norm": 0.35195530726256985, + "acc_norm_stderr": 0.01597266852368907 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.4117647058823529, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763126, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763126 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.01204966898321493, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.01204966898321493 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015473, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015473 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087307, + "mc2": 0.45024219070187044, + "mc2_stderr": 0.015413860078049907 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.017187658199336736, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, 
+ "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-tech-science-v1", + "model_sha": "f2c72cef947305f1e867f572d66963209e281788", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/skt/ko-gpt-trinity-1.2B-v0.5/result_2023-09-27 05:12:48.json b/skt/ko-gpt-trinity-1.2B-v0.5/result_2023-09-27 05:12:48.json new file mode 100644 index 0000000000000000000000000000000000000000..b9c8cc5ba2337301063c19ef97238d0de5ac81f6 --- /dev/null +++ b/skt/ko-gpt-trinity-1.2B-v0.5/result_2023-09-27 05:12:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21331058020477817, + "acc_stderr": 0.011970971742326334, + "acc_norm": 0.2687713310580205, + "acc_norm_stderr": 0.012955065963710686 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3132842063333997, + "acc_stderr": 0.004628809258483527, + "acc_norm": 0.3736307508464449, + "acc_norm_stderr": 0.004827786289074844 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2046783625730994, + "acc_stderr": 0.03094445977853321, + "acc_norm": 0.2046783625730994, + "acc_norm_stderr": 0.03094445977853321 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.01581845089477755, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.01581845089477755 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03591444084196969, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03591444084196969 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.027678452578212373, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.027678452578212373 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2282958199356913, + "acc_stderr": 0.023839303311398195, + "acc_norm": 0.2282958199356913, + "acc_norm_stderr": 0.023839303311398195 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + 
"acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.030684737115135356, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.030684737115135356 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3230769230769231, + "acc_stderr": 0.02371088850197057, + "acc_norm": 0.3230769230769231, + "acc_norm_stderr": 0.02371088850197057 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243838, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243838 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.026522709674667768, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.026522709674667768 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.02777883590493544, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.02777883590493544 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.02528839450289137, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.02528839450289137 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.19205298013245034, + "acc_stderr": 0.03216298420593612, + "acc_norm": 0.19205298013245034, + "acc_norm_stderr": 0.03216298420593612 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1791907514450867, + "acc_stderr": 0.02924251305906329, + "acc_norm": 0.1791907514450867, + "acc_norm_stderr": 0.02924251305906329 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25308641975308643, + "acc_stderr": 0.024191808600713002, + "acc_norm": 0.25308641975308643, + "acc_norm_stderr": 0.024191808600713002 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3471502590673575, + "acc_stderr": 0.03435696168361355, + "acc_norm": 0.3471502590673575, + "acc_norm_stderr": 0.03435696168361355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.01792308766780305, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.01792308766780305 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848877, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848877 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.02463004897982476, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.02463004897982476 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.030643607071677105, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.030643607071677105 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2434640522875817, + "acc_stderr": 0.017362473762146623, + "acc_norm": 0.2434640522875817, + "acc_norm_stderr": 0.017362473762146623 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 
0.01485499393801008, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.01485499393801008 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17551020408163265, + "acc_stderr": 0.02435280072297001, + "acc_norm": 0.17551020408163265, + "acc_norm_stderr": 0.02435280072297001 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.02957160106575337, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.02957160106575337 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2405475880052151, + "acc_stderr": 0.010916406735478947, + "acc_norm": 0.2405475880052151, + "acc_norm_stderr": 0.010916406735478947 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.01517698502770768, + "mc2": 0.4268789482469243, + "mc2_stderr": 0.015138938072410749 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2833530106257379, + "acc_stderr": 0.015492852084597239, + "acc_norm": 0.39433293978748524, + "acc_norm_stderr": 0.016802090674893196 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "skt/ko-gpt-trinity-1.2B-v0.5", + "model_sha": "33f84c0da333d34533f0cfbe8f5972022d681e96", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/skt/kogpt2-base-v2/result_2023-10-04 13:20:51.json b/skt/kogpt2-base-v2/result_2023-10-04 13:20:51.json new file mode 100644 index 0000000000000000000000000000000000000000..81718d32826184ec3f9bbdd75749050420f89714 --- /dev/null +++ b/skt/kogpt2-base-v2/result_2023-10-04 13:20:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19197952218430034, + "acc_stderr": 0.011509598906598086, + "acc_norm": 0.23976109215017063, + "acc_norm_stderr": 0.012476304127453947 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2806213901613224, + "acc_stderr": 0.004483845735187827, + "acc_norm": 0.3103963353913563, + "acc_norm_stderr": 0.0046171032803720095 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28607918263090676, + "acc_stderr": 0.016160871405127526, + "acc_norm": 0.28607918263090676, + "acc_norm_stderr": 0.016160871405127526 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066654, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066654 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.0281854413012341, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.0281854413012341 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2797427652733119, + "acc_stderr": 0.025494259350694905, + "acc_norm": 
0.2797427652733119, + "acc_norm_stderr": 0.025494259350694905 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168264, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20202020202020202, + "acc_stderr": 0.02860620428922987, + "acc_norm": 0.20202020202020202, + "acc_norm_stderr": 0.02860620428922987 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931666, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931666 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.024283140529467295, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.024283140529467295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23870967741935484, + "acc_stderr": 0.024251071262208834, + "acc_norm": 0.23870967741935484, + "acc_norm_stderr": 0.024251071262208834 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.02461829819586651, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02461829819586651 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721377, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721377 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834839, + "acc_norm": 0.23383084577114427, + 
"acc_norm_stderr": 0.02992941540834839 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.03456425745086999, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.03456425745086999 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21100917431192662, + "acc_stderr": 0.017493922404112648, + "acc_norm": 0.21100917431192662, + "acc_norm_stderr": 0.017493922404112648 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.033954900208561116, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.033954900208561116 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023805186524888156, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023805186524888156 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.03896878985070417, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.03896878985070417 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148594, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148594 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + 
}, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.01100597139992723, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.01100597139992723 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.01522589934082682, + "mc2": 0.45650352414713125, + "mc2_stderr": 0.015641592781139333 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36363636363636365, + "acc_stderr": 0.016538691603327715, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.017161563949916345 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "skt/kogpt2-base-v2", + "model_sha": "d0c0df48bf2b2c9350dd855021a5b216f560c0c7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sminpark/ds-alpha-model-v0.1-merged/result_2023-10-12 05:18:50.json b/sminpark/ds-alpha-model-v0.1-merged/result_2023-10-12 05:18:50.json new file mode 100644 index 0000000000000000000000000000000000000000..e688cc499ee3f3313ac36f7711f00b889083aa73 --- /dev/null +++ b/sminpark/ds-alpha-model-v0.1-merged/result_2023-10-12 05:18:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.01305716965576184, + "acc_norm": 0.3225255972696246, + "acc_norm_stderr": 0.013659980894277366 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3858793069109739, + "acc_stderr": 0.0048580740134439885, + "acc_norm": 0.4965146385182235, + "acc_norm_stderr": 0.0049896601807921685 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822582, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822582 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25287356321839083, + "acc_stderr": 0.01554337731371968, + "acc_norm": 0.25287356321839083, + "acc_norm_stderr": 0.01554337731371968 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.1925925925925926, + "acc_stderr": 0.034065420585026526, + "acc_norm": 0.1925925925925926, + "acc_norm_stderr": 0.034065420585026526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 
0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.02937917046412482, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.02937917046412482 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064537, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064537 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.29260450160771706, + "acc_stderr": 0.02583989833487798, + "acc_norm": 0.29260450160771706, + "acc_norm_stderr": 0.02583989833487798 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19282511210762332, + "acc_stderr": 0.02647824096048936, + "acc_norm": 0.19282511210762332, + "acc_norm_stderr": 0.02647824096048936 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.0307463007421245, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.0307463007421245 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.029079374539480007, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.029079374539480007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.023060438380857744, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.023060438380857744 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733555, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733555 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2064516129032258, + "acc_stderr": 0.02302589961718871, + "acc_norm": 0.2064516129032258, + "acc_norm_stderr": 0.02302589961718871 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.27350427350427353, + "acc_stderr": 0.029202540153431177, + "acc_norm": 0.27350427350427353, + "acc_norm_stderr": 0.029202540153431177 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708097, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708097 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + 
}, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.02549753263960954, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.02549753263960954 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.029705284056772432, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772432 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566018, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566018 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587404, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587404 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.024288533637726095, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.024288533637726095 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26238532110091745, + "acc_stderr": 0.018861885021534734, + "acc_norm": 0.26238532110091745, + "acc_norm_stderr": 0.018861885021534734 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.03512207412302053, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.03512207412302053 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.024630048979824775, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.024630048979824775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.19008264462809918, + "acc_stderr": 0.03581796951709282, + "acc_norm": 0.19008264462809918, + "acc_norm_stderr": 0.03581796951709282 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.01755581809132227, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.01755581809132227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.03385177976044811, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.03385177976044811 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.02423101337054109, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.02423101337054109 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484378, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25316455696202533, + "acc_stderr": 0.0283046579430353, + "acc_norm": 0.25316455696202533, + "acc_norm_stderr": 0.0283046579430353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2588005215123859, + "acc_stderr": 0.011186109046564608, + "acc_norm": 0.2588005215123859, + "acc_norm_stderr": 0.011186109046564608 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.01520152224629995, + "mc2": 0.39714724864543566, + "mc2_stderr": 0.014754643585296967 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346453, + "acc_norm": 0.38961038961038963, + "acc_norm_stderr": 0.016766161671893497 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sminpark/ds-alpha-model-v0.1-merged", + "model_sha": "877c87e7e62fa297f23e49e4aed3a2c0398a920a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sohohuk/test1/result_2023-11-01 06:10:38.json b/sohohuk/test1/result_2023-11-01 06:10:38.json new file mode 100644 index 0000000000000000000000000000000000000000..0970ae0bd33668e1adc4e4b6d963098b8aa913be --- /dev/null +++ b/sohohuk/test1/result_2023-11-01 06:10:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.013621696119173304, + "acc_norm": 0.36860068259385664, + "acc_norm_stderr": 0.014097810678042196 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35261900019916353, + "acc_stderr": 0.004768088918512186, + "acc_norm": 0.4509061939852619, + "acc_norm_stderr": 0.004965670398127352 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.454661558109834, + "acc_stderr": 0.017806304585052602, + "acc_norm": 0.454661558109834, + "acc_norm_stderr": 0.017806304585052602 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.028256660723360184, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.028256660723360184 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.0305728113102996, + "acc_norm": 
0.6794871794871795, + "acc_norm_stderr": 0.0305728113102996 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.03028500925900981, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.03028500925900981 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.035161847729521675, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.035161847729521675 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.03567603799639171, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.03567603799639171 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465918, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465918 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47155963302752296, + "acc_stderr": 0.021402615697348047, + "acc_norm": 0.47155963302752296, + "acc_norm_stderr": 0.021402615697348047 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + 
"acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.019576953122088837, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.019576953122088837 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925303, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925303 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483924, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.032002553478937816, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.032002553478937816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4978902953586498, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.4978902953586498, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.01185591158704823, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.01185591158704823 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.0381549430868893 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32068543451652387, + "mc1_stderr": 0.016339170373280906, + "mc2": 0.5002957366542341, + "mc2_stderr": 0.015624413933134037 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4167650531286895, + "acc_stderr": 0.01695048914610883, + "acc_norm": 0.48406139315230223, + "acc_norm_stderr": 0.017181617837190195 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sohohuk/test1", + "model_sha": "b7b22b459409508e80e7d6a72c09e5e5f765428e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/squarelike/llama2-ko-medical-7b/result_2023-11-02 08:01:53.json b/squarelike/llama2-ko-medical-7b/result_2023-11-02 08:01:53.json new file mode 100644 index 0000000000000000000000000000000000000000..f37d086ce72c4b68b364640c167b7fc5fc56a4e2 --- /dev/null +++ b/squarelike/llama2-ko-medical-7b/result_2023-11-02 08:01:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1885665529010239, + "acc_stderr": 0.011430897647675815, + "acc_norm": 0.2380546075085324, + "acc_norm_stderr": 0.012445770028026205 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25273849830711015, + "acc_stderr": 0.004336941069568736, + "acc_norm": 0.2559251145190201, + 
"acc_norm_stderr": 0.004354881005789729 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.273142112125163, + "acc_stderr": 0.011380150567830394, + "acc_norm": 0.273142112125163, + "acc_norm_stderr": 0.011380150567830394 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 
0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752325, + "mc2": 0.4920860295556251, + "mc2_stderr": 0.016960443639831176 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.0755608028335301, + "acc_stderr": 0.00908661811311919, + "acc_norm": 0.2526564344746163, + "acc_norm_stderr": 0.014939640598798425 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "squarelike/llama2-ko-medical-7b", + "model_sha": "85acb5d9285798c89b004dc02b093b2d15a84116", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sronger/ko-llm-llama-2-7b-LoRA-IA3/result_2023-11-29 11:35:39.json b/sronger/ko-llm-llama-2-7b-LoRA-IA3/result_2023-11-29 11:35:39.json new 
file mode 100644 index 0000000000000000000000000000000000000000..e842db785c592dbdcf2c27b84312ee575d560334 --- /dev/null +++ b/sronger/ko-llm-llama-2-7b-LoRA-IA3/result_2023-11-29 11:35:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19283276450511946, + "acc_stderr": 0.011529055465663325, + "acc_norm": 0.24573378839590443, + "acc_norm_stderr": 0.012581033453730111 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2506472814180442, + "acc_stderr": 0.004325000473328607, + "acc_norm": 0.24885480979884486, + "acc_norm_stderr": 0.0043146590346493955 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.29118773946360155, + "acc_stderr": 0.016246087069701404, + "acc_norm": 0.29118773946360155, + "acc_norm_stderr": 0.016246087069701404 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.17777777777777778, + "acc_stderr": 0.03302789859901717, + "acc_norm": 0.17777777777777778, + "acc_norm_stderr": 0.03302789859901717 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071855, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071855 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291954, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291954 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.02665353159671548, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.02665353159671548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24102564102564103, + "acc_stderr": 0.02168554666533319, + "acc_norm": 0.24102564102564103, + "acc_norm_stderr": 
0.02168554666533319 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.03051653073269444, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.03051653073269444 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.18387096774193548, + "acc_stderr": 0.02203721734026783, + "acc_norm": 0.18387096774193548, + "acc_norm_stderr": 0.02203721734026783 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891162, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891162 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.02634148037111834, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.02634148037111834 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276611, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276611 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.19205298013245034, + "acc_stderr": 0.03216298420593613, + "acc_norm": 0.19205298013245034, + "acc_norm_stderr": 0.03216298420593613 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.031265112061730424, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.031265112061730424 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21957671957671956, + "acc_stderr": 0.02132001859977035, + "acc_norm": 0.21957671957671956, + "acc_norm_stderr": 0.02132001859977035 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548594, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548594 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2191358024691358, + "acc_stderr": 0.023016705640262185, + "acc_norm": 0.2191358024691358, + "acc_norm_stderr": 0.023016705640262185 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.18134715025906736, + "acc_stderr": 0.02780703236068609, + "acc_norm": 0.18134715025906736, + "acc_norm_stderr": 0.02780703236068609 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.018125669180861507, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.018125669180861507 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818115, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818115 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02428861946604611, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02428861946604611 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.272875816993464, + "acc_stderr": 0.01802047414839358, + "acc_norm": 0.272875816993464, + "acc_norm_stderr": 0.01802047414839358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.21631205673758866, + "acc_stderr": 0.024561720560562786, + "acc_norm": 0.21631205673758866, + "acc_norm_stderr": 0.024561720560562786 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.14814814814814814, + "acc_stderr": 0.02422762927372837, + "acc_norm": 0.14814814814814814, + "acc_norm_stderr": 0.02422762927372837 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2201117318435754, + "acc_stderr": 0.013856994024227179, + "acc_norm": 0.2201117318435754, + "acc_norm_stderr": 0.013856994024227179 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.02315746830855936, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.02315746830855936 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.025409301953225678, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.025409301953225678 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.24315514993481094, + "acc_stderr": 0.010956556654417348, + "acc_norm": 0.24315514993481094, + "acc_norm_stderr": 0.010956556654417348 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251742, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251742 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.18787878787878787, + "acc_stderr": 0.03050193405942914, + "acc_norm": 0.18787878787878787, + "acc_norm_stderr": 0.03050193405942914 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715018, + "mc2": 0.4957664753905337, + "mc2_stderr": 0.016720650341527123 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07910271546635184, + "acc_stderr": 0.00927931912600906, + "acc_norm": 0.3317591499409681, + "acc_norm_stderr": 0.016187984642157312 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sronger/ko-llm-llama-2-7b-LoRA-IA3", + "model_sha": "2294d6279c3055c45b7e33953feae619534408e8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sronger/ko-llm-llama-2-7b-chat2/result_2023-11-29 08:01:38.json b/sronger/ko-llm-llama-2-7b-chat2/result_2023-11-29 08:01:38.json new file mode 100644 index 0000000000000000000000000000000000000000..83ea5c2bcbf82d8e2e9a8e5385856cf89e5bbbc2 --- /dev/null +++ b/sronger/ko-llm-llama-2-7b-chat2/result_2023-11-29 08:01:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.18430034129692832, + "acc_stderr": 0.011330517933037415, + "acc_norm": 0.2354948805460751, + "acc_norm_stderr": 0.012399451855004746 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25323640709022105, + "acc_stderr": 0.004339764434219062, + "acc_norm": 0.24507070304720174, + "acc_norm_stderr": 0.0042925005017162305 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 
0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2711864406779661, + "acc_stderr": 0.011354581451622985, + "acc_norm": 0.2711864406779661, + "acc_norm_stderr": 0.011354581451622985 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268049, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268049 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707682, + "mc2": 0.5051802317030636, + "mc2_stderr": 0.016856528106826654 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.06847697756788666, + "acc_stderr": 0.008683282020992616, + "acc_norm": 0.3293978748524203, + "acc_norm_stderr": 0.016158746868147143 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sronger/ko-llm-llama-2-7b-chat2", + "model_sha": "6f4b2c09a0ef4e114c83ccf1bd6e131d1a0fb39a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sronger/ko-llm-llama-2-7b-chat3/result_2023-11-29 10:32:08.json b/sronger/ko-llm-llama-2-7b-chat3/result_2023-11-29 10:32:08.json new file mode 100644 index 0000000000000000000000000000000000000000..0d44c77cf25dd4c713ec4881ad565714b1469361 --- /dev/null +++ b/sronger/ko-llm-llama-2-7b-chat3/result_2023-11-29 10:32:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1885665529010239, + "acc_stderr": 0.011430897647675823, + "acc_norm": 0.24488054607508533, + "acc_norm_stderr": 0.012566273985131354 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2545309699263095, + "acc_stderr": 0.0043470700195274775, + "acc_norm": 0.24746066520613424, + "acc_norm_stderr": 0.004306547156331383 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + 
"acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, 
+ "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539265, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539265 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715013, + "mc2": 0.5039896617577472, + "mc2_stderr": 0.016756833084863455 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.06847697756788666, + "acc_stderr": 0.008683282020992614, + "acc_norm": 0.33293978748524206, + "acc_norm_stderr": 0.016202431208373797 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sronger/ko-llm-llama-2-7b-chat3", + "model_sha": "96cebf91679d9a5910486aa0324f2f6db685623a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sronger/koko_test/result_2023-11-29 08:29:42.json b/sronger/koko_test/result_2023-11-29 08:29:42.json new file mode 100644 index 0000000000000000000000000000000000000000..811011ce08b248f80c46b4fa165499fa1f3d8c88 --- /dev/null +++ b/sronger/koko_test/result_2023-11-29 08:29:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20051194539249148, + "acc_stderr": 0.011700318050499363, + "acc_norm": 0.2721843003412969, + "acc_norm_stderr": 0.013006600406423704 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25124477195777734, + "acc_stderr": 0.00432842570099869, + "acc_norm": 0.24736108344951205, + "acc_norm_stderr": 0.004305965431515147 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.039891398595317706, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.039891398595317706 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.22289156626506024, + "acc_stderr": 0.03240004825594687, + "acc_norm": 0.22289156626506024, + "acc_norm_stderr": 0.03240004825594687 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932026, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932026 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24786324786324787, + "acc_stderr": 0.028286324075564386, + "acc_norm": 0.24786324786324787, + "acc_norm_stderr": 0.028286324075564386 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.25870646766169153, + "acc_stderr": 0.03096590312357301, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.02525117393649502, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.02525117393649502 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21651376146788992, + "acc_stderr": 0.017658710594443145, + "acc_norm": 0.21651376146788992, + "acc_norm_stderr": 0.017658710594443145 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.018120224251484584, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484584 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 
0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539265, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539265 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22766217870257038, + "mc1_stderr": 0.014679255032111068, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07201889020070838, + "acc_stderr": 0.008888072708500573, + "acc_norm": 0.21959858323494688, + "acc_norm_stderr": 0.014232743085580256 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sronger/koko_test", + "model_sha": "6e22512c2781b0f5e34d50358a02ff7d1f00da40", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sronger/mistral-ko-llm/result_2023-12-04 07:31:58.json b/sronger/mistral-ko-llm/result_2023-12-04 07:31:58.json new file mode 100644 index 0000000000000000000000000000000000000000..bf82adcd63228fd94b20367a0ae6c9e697f5231d --- /dev/null +++ b/sronger/mistral-ko-llm/result_2023-12-04 07:31:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.181740614334471, + "acc_stderr": 0.011269198948880236, + "acc_norm": 0.24744027303754265, + "acc_norm_stderr": 0.01261035266329267 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25323640709022105, + "acc_stderr": 0.004339764434219061, + "acc_norm": 0.2477594104760008, + "acc_norm_stderr": 0.00430829048410049 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + 
"acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + 
"acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539265, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539265 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.014948812679062135, + "mc2": 0.4836672858203277, + "mc2_stderr": 0.017102451726330438 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08028335301062574, + "acc_stderr": 0.009342316867054019, + "acc_norm": 0.32585596221959856, + "acc_norm_stderr": 0.016114023894800326 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sronger/mistral-ko-llm", + "model_sha": "f3077660096ca9ec680dea9b50a82ff338d8da85", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sue3489/test0_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:06:42.json b/sue3489/test0_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:06:42.json new file mode 100644 index 0000000000000000000000000000000000000000..e18f2b5580bac58f38a92dd55f168a01ae17cce6 --- /dev/null +++ b/sue3489/test0_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:06:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2713310580204778, + "acc_stderr": 0.012993807727545789, + "acc_norm": 0.310580204778157, + "acc_norm_stderr": 0.013522292098053057 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36456881099382593, + "acc_stderr": 0.004803253812881045, + "acc_norm": 0.46564429396534557, + "acc_norm_stderr": 0.004977988452502642 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.2912621359223301, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23499361430395913, + "acc_stderr": 0.015162024152278445, + "acc_norm": 0.23499361430395913, + "acc_norm_stderr": 0.015162024152278445 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.1829787234042553, + "acc_stderr": 0.02527604100044997, + "acc_norm": 0.1829787234042553, + "acc_norm_stderr": 0.02527604100044997 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.0357160923005348, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.0357160923005348 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.0252180403734106, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.0252180403734106 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.27802690582959644, + "acc_stderr": 0.030069584874494033, + "acc_norm": 0.27802690582959644, + "acc_norm_stderr": 0.030069584874494033 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596918, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596918 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.033184773338453315, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.033184773338453315 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793254, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793254 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602364, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602364 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243838, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243838 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03010833071801162, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03010833071801162 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517414, + 
"acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517414 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.02704685763071668, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.02704685763071668 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241238, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910507, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.03368762932259431, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.03368762932259431 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948365, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948365 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.022894082489925992, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.022894082489925992 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.024569223600460845, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.024569223600460845 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3247706422018349, + "acc_stderr": 0.02007772910931033, + "acc_norm": 0.3247706422018349, + "acc_norm_stderr": 0.02007772910931033 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 
0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818737, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818737 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516302, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516302 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351586, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351586 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22712418300653595, + "acc_stderr": 0.016949853279212376, + "acc_norm": 0.22712418300653595, + "acc_norm_stderr": 0.016949853279212376 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3020408163265306, + "acc_stderr": 0.029393609319879808, + "acc_norm": 0.3020408163265306, + "acc_norm_stderr": 0.029393609319879808 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25097783572359844, + "acc_stderr": 0.011073730299187224, + "acc_norm": 0.25097783572359844, + "acc_norm_stderr": 0.011073730299187224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384308, + "mc2": 0.4382110452098873, + "mc2_stderr": 0.015112522165835224 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32585596221959856, + "acc_stderr": 0.016114023894800326, + "acc_norm": 0.3990554899645809, + 
"acc_norm_stderr": 0.016836377292849303 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sue3489/test0_kullm-polyglot-5.8b-v2-koalpaca-v1.1b", + "model_sha": "79acd9e76f6a5f1e814294761b11c31fc24b9e64", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sue3489/test1_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:49:57.json b/sue3489/test1_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:49:57.json new file mode 100644 index 0000000000000000000000000000000000000000..4fc44ac866aa2946d163962cd97c361543bae564 --- /dev/null +++ b/sue3489/test1_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:49:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": 
{ + "acc": 0.2841296928327645, + "acc_stderr": 0.013179442447653887, + "acc_norm": 0.32593856655290104, + "acc_norm_stderr": 0.013697432466693237 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3791077474606652, + "acc_stderr": 0.004841734453506664, + "acc_norm": 0.4759012148974308, + "acc_norm_stderr": 0.004983982396187361 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.030267457554898465, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.030267457554898465 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2503192848020434, + "acc_stderr": 0.015491088951494597, + "acc_norm": 0.2503192848020434, + "acc_norm_stderr": 0.015491088951494597 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.251063829787234, + "acc_stderr": 0.028346963777162452, + "acc_norm": 0.251063829787234, + "acc_norm_stderr": 0.028346963777162452 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.033844291552331346, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.033844291552331346 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291936, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291936 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270286 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.037800192304380135, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.037800192304380135 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863783, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863783 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02213908110397153, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02213908110397153 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, 
+ "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733545, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24193548387096775, + "acc_stderr": 0.024362599693031096, + "acc_norm": 0.24193548387096775, + "acc_norm_stderr": 0.024362599693031096 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.027046857630716677, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.027046857630716677 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118352, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118352 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910508, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473836, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473836 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2328042328042328, + "acc_stderr": 0.021765961672154534, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.021765961672154534 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615769, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615769 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.0242885336377261, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.0242885336377261 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.2538860103626943, + 
"acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.03646758875075566, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.03646758875075566 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30091743119266057, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.30091743119266057, + "acc_norm_stderr": 0.019664751366802114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.017282760695167418, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.017282760695167418 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.024414612974307703, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.024414612974307703 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.03022522616001237, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.03022522616001237 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670736, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670736 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.027979823538744546, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.027979823538744546 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2470664928292047, + "acc_stderr": 0.011015752255279338, + "acc_norm": 0.2470664928292047, + "acc_norm_stderr": 0.011015752255279338 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350194, + "acc_norm": 0.24509803921568626, + 
"acc_norm_stderr": 0.03019028245350194 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326905, + "mc2": 0.43178124206391555, + "mc2_stderr": 0.01588615796057271 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29161747343565525, + "acc_stderr": 0.015626276690070242, + "acc_norm": 0.3187721369539551, + "acc_norm_stderr": 0.016021427055309588 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sue3489/test1_kullm-polyglot-5.8b-v2-koalpaca-v1.1b", + "model_sha": "acc7ed3105114ba922fe4b408807b57e39ec0cff", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 
1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/taeminlee/mistral_7B_ma/result_2023-10-19 06:42:26.json b/taeminlee/mistral_7B_ma/result_2023-10-19 06:42:26.json new file mode 100644 index 0000000000000000000000000000000000000000..f57b552d668317ee19822f6ca7fa7be56512d658 --- /dev/null +++ b/taeminlee/mistral_7B_ma/result_2023-10-19 06:42:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33532423208191126, + "acc_stderr": 0.01379618294778556, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668526 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3703445528779128, + "acc_stderr": 0.004819100456867818, + "acc_norm": 0.481876120294762, + "acc_norm_stderr": 0.004986502296931182 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468544, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 
0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + 
"acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214338, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142628, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142628 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490435, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.01980828131744984, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.01980828131744984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.03381200005643525, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.03381200005643525 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.34413407821229053, + "acc_stderr": 0.015889221313307094, + "acc_norm": 0.34413407821229053, + "acc_norm_stderr": 0.015889221313307094 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877743, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.03175195237583323, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.03175195237583323 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666535, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.011989936640666535 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.4613168911756529, + "mc2_stderr": 0.015417066073991514 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654273, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, 
+ "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "taeminlee/mistral_7B_ma", + "model_sha": "9773826bd9bb297186b78c87a410cbb07e1919cc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/taeminlee/polyglot_12.8b_ins_orcastyle/result_2023-10-10 08:18:37.json b/taeminlee/polyglot_12.8b_ins_orcastyle/result_2023-10-10 08:18:37.json new file mode 100644 index 0000000000000000000000000000000000000000..8ddb6a07697bbc98aa23c36b089a4c1cdcd2ef67 --- /dev/null +++ b/taeminlee/polyglot_12.8b_ins_orcastyle/result_2023-10-10 08:18:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27047781569965873, + "acc_stderr": 0.012980954547659556, + "acc_norm": 0.32849829351535836, + "acc_norm_stderr": 0.01372497846553737 + }, + "harness|ko_hellaswag|10": { + "acc": 0.386476797450707, + "acc_stderr": 0.004859467984155263, + "acc_norm": 0.4980083648675563, + "acc_norm_stderr": 0.004989741826250384 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.035650796707083106, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.035650796707083106 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.036756688322331886, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.036756688322331886 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27330779054916987, + "acc_stderr": 0.015936681062628556, + "acc_norm": 0.27330779054916987, + "acc_norm_stderr": 0.015936681062628556 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617722, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617722 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2297872340425532, + "acc_stderr": 0.027501752944412424, + "acc_norm": 0.2297872340425532, + "acc_norm_stderr": 0.027501752944412424 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.032082844503563655, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.032082844503563655 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140474, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.026160584450140474 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19282511210762332, + "acc_stderr": 0.02647824096048936, + "acc_norm": 0.19282511210762332, + "acc_norm_stderr": 0.02647824096048936 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.20610687022900764, + "acc_stderr": 0.03547771004159465, + "acc_norm": 0.20610687022900764, + "acc_norm_stderr": 0.03547771004159465 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23737373737373738, + "acc_stderr": 0.03031371053819887, + "acc_norm": 0.23737373737373738, + "acc_norm_stderr": 0.03031371053819887 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.2482758620689655, + "acc_stderr": 0.03600105692727771, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727771 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.02702543349888239, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.02702543349888239 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 0.02119363252514854, + "acc_norm": 0.22564102564102564, + "acc_norm_stderr": 0.02119363252514854 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.029678333141444455, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.029678333141444455 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22580645161290322, + "acc_stderr": 0.023785577884181012, + "acc_norm": 0.22580645161290322, + "acc_norm_stderr": 0.023785577884181012 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.030236389942173092, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.030236389942173092 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.02634148037111835, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.02634148037111835 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072774, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072774 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230193, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230193 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.03336767086567977, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.03336767086567977 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21164021164021163, + "acc_stderr": 0.021037331505262883, + "acc_norm": 0.21164021164021163, + "acc_norm_stderr": 0.021037331505262883 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + 
"acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.023083658586984204, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.023083658586984204 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.02577311116963043, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.02577311116963043 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.029778663037752975, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.029778663037752975 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3137614678899083, + "acc_stderr": 0.019894723341469134, + "acc_norm": 0.3137614678899083, + "acc_norm_stderr": 0.019894723341469134 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818115, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818115 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04391326286724071, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04391326286724071 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810536, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810536 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2198581560283688, + "acc_stderr": 0.024706141070705488, + "acc_norm": 0.2198581560283688, + "acc_norm_stderr": 0.024706141070705488 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 
0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681453, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681453 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.029696338713422882, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.029696338713422882 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2653194263363755, + "acc_stderr": 0.011276198843958866, + "acc_norm": 0.2653194263363755, + "acc_norm_stderr": 0.011276198843958866 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693257, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693257 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.034277431758165236, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.034277431758165236 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476189, + "mc2": 0.42136905448237544, + "mc2_stderr": 0.014761634772803508 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3317591499409681, + "acc_stderr": 0.016187984642157312, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.016876941165045612 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "taeminlee/polyglot_12.8b_ins_orcastyle", + "model_sha": "eed242f438b6ee3860a810454126f468373836b2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/taeminlee/polyglot_12.8b_ins_orcastyle_ma/result_2023-10-16 09:17:56.json b/taeminlee/polyglot_12.8b_ins_orcastyle_ma/result_2023-10-16 09:17:56.json new file mode 100644 index 0000000000000000000000000000000000000000..1d3bed86889c4beeadff924ca50d33bd533ec8ef --- /dev/null +++ b/taeminlee/polyglot_12.8b_ins_orcastyle_ma/result_2023-10-16 09:17:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2636518771331058, + "acc_stderr": 0.012875929151297073, + "acc_norm": 0.31399317406143346, + "acc_norm_stderr": 0.013562691224726284 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3792073292172874, + "acc_stderr": 0.0048419819735152775, + "acc_norm": 0.48775144393547104, + "acc_norm_stderr": 0.004988283981631052 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.031267817146631786, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.031267817146631786 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2554278416347382, + "acc_stderr": 0.015594955384455765, + "acc_norm": 0.2554278416347382, + "acc_norm_stderr": 0.015594955384455765 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.037857144650666544, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.037857144650666544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292326, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292326 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064536, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064536 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.02631185807185416, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.02631185807185416 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.29596412556053814, + "acc_stderr": 0.03063659134869981, + "acc_norm": 0.29596412556053814, + "acc_norm_stderr": 0.03063659134869981 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.040393149787245626, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.040393149787245626 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.03068473711513535, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.03068473711513535 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3128205128205128, + "acc_stderr": 0.023507579020645347, + "acc_norm": 0.3128205128205128, + "acc_norm_stderr": 0.023507579020645347 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553873, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553873 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241238, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721377, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721377 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.031157150869355554, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.031157150869355554 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031715, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031715 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.023703099525258176, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.023703099525258176 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.024477222856135114, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.024477222856135114 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32642487046632124, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.32642487046632124, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3284403669724771, + "acc_stderr": 0.020135902797298395, + "acc_norm": 0.3284403669724771, + "acc_norm_stderr": 0.020135902797298395 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.02582916327275748, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.02582916327275748 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.23140495867768596, + "acc_stderr": 0.03849856098794088, + "acc_norm": 0.23140495867768596, + "acc_norm_stderr": 0.03849856098794088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.03611780560284898, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.03611780560284898 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23366013071895425, + "acc_stderr": 0.017119158496044506, + "acc_norm": 0.23366013071895425, + "acc_norm_stderr": 0.017119158496044506 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.02564555362226673, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.02564555362226673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347018, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347018 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.031001209039894836, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.031001209039894836 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460295, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460295 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24771838331160365, + "acc_stderr": 0.011025499291443737, + "acc_norm": 0.24771838331160365, + "acc_norm_stderr": 0.011025499291443737 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.01548369193923726, + "mc2": 0.4409725050511923, + "mc2_stderr": 0.014977060866131325 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3282172373081464, + "acc_stderr": 0.016143955036184442, + "acc_norm": 0.3919716646989374, + "acc_norm_stderr": 0.016784332119424074 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "taeminlee/polyglot_12.8b_ins_orcastyle_ma", + "model_sha": "e59d2d5beba5386564f914402663e3d530a1b093", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/teknium/OpenHermes-2-Mistral-7B/result_2023-10-23 09:00:37.json b/teknium/OpenHermes-2-Mistral-7B/result_2023-10-23 09:00:37.json new file mode 100644 index 0000000000000000000000000000000000000000..e2ba147aee96d2387adeb256a854b3a8119e89a4 --- /dev/null +++ b/teknium/OpenHermes-2-Mistral-7B/result_2023-10-23 09:00:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3310580204778157, + "acc_stderr": 0.013752062419817836, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759075 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37711611232822145, + "acc_stderr": 0.004836738514051328, + "acc_norm": 0.48078072097191793, + "acc_norm_stderr": 0.004986093791041655 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.017865944827291605, + "acc_norm": 0.48020434227330777, + "acc_norm_stderr": 0.017865944827291605 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 
0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942656, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094527, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094527 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155247, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155247 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.026918645383239015, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.026918645383239015 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833942, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833942 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 0.021436420955529424, + "acc_norm": 0.4954128440366973, + "acc_norm_stderr": 0.021436420955529424 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.42320261437908496, + "acc_stderr": 0.019987809769482064, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.019987809769482064 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2223463687150838, + "acc_stderr": 0.013907189208156881, + "acc_norm": 0.2223463687150838, + "acc_norm_stderr": 0.013907189208156881 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254184, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254184 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34485006518904826, + "acc_stderr": 0.01213988100628706, + "acc_norm": 0.34485006518904826, + "acc_norm_stderr": 0.01213988100628706 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.01623806506905961, + "mc2": 0.4910419282897384, + "mc2_stderr": 0.015891313216487672 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43565525383707204, + "acc_stderr": 0.01704741522947632, + "acc_norm": 0.46635182998819363, + "acc_norm_stderr": 0.01715138411713187 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "teknium/OpenHermes-2-Mistral-7B", + "model_sha": "2bb0b75442eeadc2da3035a6ada86e3953308ac3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tlphams/gollm-12.8b-instruct-v2.1/result_2023-11-01 04:43:20.json b/tlphams/gollm-12.8b-instruct-v2.1/result_2023-11-01 04:43:20.json new file mode 100644 index 0000000000000000000000000000000000000000..809d0ca94cdaa78cad07cb9ef127f9e2c1f28bdd --- /dev/null +++ b/tlphams/gollm-12.8b-instruct-v2.1/result_2023-11-01 04:43:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24914675767918087, + "acc_stderr": 0.012639407111926435, + "acc_norm": 0.29692832764505117, + "acc_norm_stderr": 0.013352025976725222 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3407687711611233, + "acc_stderr": 0.004729990807895056, + "acc_norm": 0.41804421429994026, + "acc_norm_stderr": 0.004922294797766664 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2848020434227331, + "acc_stderr": 0.016139174096522577, + "acc_norm": 0.2848020434227331, + "acc_norm_stderr": 0.016139174096522577 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.14814814814814814, + "acc_stderr": 0.03068864761035268, + "acc_norm": 0.14814814814814814, + "acc_norm_stderr": 0.03068864761035268 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.028504856470514196, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.028504856470514196 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740748, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740748 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188957, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188957 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596916, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596916 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277726, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277726 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28974358974358977, + "acc_stderr": 0.02300062824368797, + "acc_norm": 0.28974358974358977, + "acc_norm_stderr": 0.02300062824368797 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946336, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946336 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.024892469172462843, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.024892469172462843 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.02704685763071668, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.02704685763071668 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.19622641509433963, + "acc_stderr": 0.024442388131100817, + "acc_norm": 0.19622641509433963, + "acc_norm_stderr": 0.024442388131100817 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.17218543046357615, + "acc_stderr": 0.03082613696196239, + "acc_norm": 0.17218543046357615, + "acc_norm_stderr": 0.03082613696196239 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.031862098516411426, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.031862098516411426 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765134, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765134 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322004, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322004 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1963302752293578, + "acc_stderr": 0.01703071933915435, + "acc_norm": 0.1963302752293578, + "acc_norm_stderr": 0.01703071933915435 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + 
"acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.039849796533028704, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.039849796533028704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.016819028375736383, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266733, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266733 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697623, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697623 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.01431099954796147, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.01431099954796147 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681407, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681407 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.026711430555538422, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.026711430555538422 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842544, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842544 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.031660096793998116, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.031660096793998116 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.0340150671524904, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.0340150671524904 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.38968108562447634, + "mc2_stderr": 0.01596435745027793 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32585596221959856, + "acc_stderr": 0.016114023894800336, + "acc_norm": 0.3955135773317591, + "acc_norm_stderr": 0.016810815902206042 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tlphams/gollm-12.8b-instruct-v2.1", + "model_sha": "cf0ea50740783b1c6791e9fd8afdc9bdfebadf35", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tlphams/gollm-12.8b-instruct-v2.3/result_2023-11-08 03:54:19.json b/tlphams/gollm-12.8b-instruct-v2.3/result_2023-11-08 03:54:19.json new file mode 100644 index 0000000000000000000000000000000000000000..954bde025941cb6e7d3a1cc7739dd6220f2fb09b --- /dev/null +++ b/tlphams/gollm-12.8b-instruct-v2.3/result_2023-11-08 03:54:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24914675767918087, + "acc_stderr": 0.01263940711192643, + "acc_norm": 0.2960750853242321, + "acc_norm_stderr": 0.013340916085246271 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3414658434574786, + "acc_stderr": 0.004732322172153751, + "acc_norm": 0.4298944433379805, + "acc_norm_stderr": 0.004940490508240642 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.18128654970760233, + "acc_stderr": 0.029547741687640024, + "acc_norm": 0.18128654970760233, + "acc_norm_stderr": 0.029547741687640024 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.04721188506097171, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.04721188506097171 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20945083014048532, + "acc_stderr": 0.014551310568143698, + "acc_norm": 0.20945083014048532, + "acc_norm_stderr": 0.014551310568143698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334942, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064537, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064537 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.10762331838565023, + "acc_stderr": 0.020799400082880004, + "acc_norm": 0.10762331838565023, + "acc_norm_stderr": 0.020799400082880004 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.02845015479411863, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.02845015479411863 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.02271746789770861, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.02271746789770861 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023132376234543325, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023132376234543325 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.3467889908256881, + "acc_stderr": 0.02040609710409303, + "acc_norm": 0.3467889908256881, + "acc_norm_stderr": 0.02040609710409303 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.02498710636564297, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.02498710636564297 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.22294654498044328, + "acc_stderr": 0.010630525747386089, + "acc_norm": 0.22294654498044328, + "acc_norm_stderr": 0.010630525747386089 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.22766217870257038, + "mc1_stderr": 0.01467925503211107, + "mc2": 0.3871459989979698, + "mc2_stderr": 0.014973315901005473 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346463, + "acc_norm": 0.41086186540731995, + "acc_norm_stderr": 0.016914972767841062 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tlphams/gollm-12.8b-instruct-v2.3", + "model_sha": "3eb75691c0365fe231a8666291a6ae3178d88b4b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tlphams/gollm-instruct-all-in-one-v1/result_2023-10-04 06:14:50.json b/tlphams/gollm-instruct-all-in-one-v1/result_2023-10-04 06:14:50.json new file mode 100644 index 
0000000000000000000000000000000000000000..571b9d0d93278cc2a5202231c808929ff67be409 --- /dev/null +++ b/tlphams/gollm-instruct-all-in-one-v1/result_2023-10-04 06:14:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25853242320819114, + "acc_stderr": 0.012794553754288677, + "acc_norm": 0.31569965870307165, + "acc_norm_stderr": 0.013582571095815293 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3498307110137423, + "acc_stderr": 0.004759416464201141, + "acc_norm": 0.43178649671380204, + "acc_norm_stderr": 0.0049431275832905125 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.15789473684210525, + "acc_stderr": 0.027966785859160872, + "acc_norm": 0.15789473684210525, + "acc_norm_stderr": 0.027966785859160872 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.014485656041669164, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.014485656041669164 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.13333333333333333, + "acc_stderr": 0.029365879728106822, + "acc_norm": 0.13333333333333333, + "acc_norm_stderr": 0.029365879728106822 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.028020226271200217, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.028020226271200217 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21084337349397592, + "acc_stderr": 0.03175554786629919, + "acc_norm": 0.21084337349397592, + "acc_norm_stderr": 0.03175554786629919 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24437299035369775, + "acc_stderr": 0.024406162094668882, + "acc_norm": 0.24437299035369775, + "acc_norm_stderr": 0.024406162094668882 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2242152466367713, + "acc_stderr": 0.027991534258519527, + "acc_norm": 0.2242152466367713, + "acc_norm_stderr": 0.027991534258519527 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23737373737373738, + "acc_stderr": 0.030313710538198885, + "acc_norm": 0.23737373737373738, + "acc_norm_stderr": 0.030313710538198885 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.0302839955258844, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.0302839955258844 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.02424378399406217, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 0.02424378399406217 + 
}, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335137, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335137 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.02581923325648373, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.02581923325648373 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.025497532639609542, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.025497532639609542 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.035118075718047245, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.035118075718047245 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555401, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555401 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.020006075494524416, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.020006075494524416 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757177, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, 
+ "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104284, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104284 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30825688073394497, + "acc_stderr": 0.019798366698367268, + "acc_norm": 0.30825688073394497, + "acc_norm_stderr": 0.019798366698367268 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046633, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046633 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.21487603305785125, + "acc_stderr": 0.03749492448709698, + "acc_norm": 0.21487603305785125, + "acc_norm_stderr": 0.03749492448709698 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21405228758169934, + "acc_stderr": 0.01659342966232903, + "acc_norm": 0.21405228758169934, + "acc_norm_stderr": 0.01659342966232903 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.024847921358063962, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.024847921358063962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.03562367850095391, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.03562367850095391 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319464, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.02992310056368391, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.02992310056368391 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460295, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460295 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.22685788787483702, + "acc_stderr": 0.01069634813356993, + 
"acc_norm": 0.22685788787483702, + "acc_norm_stderr": 0.01069634813356993 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693247, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693247 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22643818849449204, + "mc1_stderr": 0.014651337324602602, + "mc2": 0.40483124709618634, + "mc2_stderr": 0.015674854564799542 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32113341204250295, + "acc_stderr": 0.016052762579111562, + "acc_norm": 0.36717827626918537, + "acc_norm_stderr": 0.016572727807458592 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "tlphams/gollm-instruct-all-in-one-v1", + "model_sha": "44937fddb3168a387b55173371b365a0b280ae3e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tmdduq/komt-mistral-7b-v1-dpo-osy-v1/result_2023-12-24 15:10:29.json b/tmdduq/komt-mistral-7b-v1-dpo-osy-v1/result_2023-12-24 15:10:29.json new file mode 100644 index 0000000000000000000000000000000000000000..552ddd8a5dd2b4dc452bfbd57615ddb0f73cc989 --- /dev/null +++ b/tmdduq/komt-mistral-7b-v1-dpo-osy-v1/result_2023-12-24 15:10:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32849829351535836, + "acc_stderr": 0.013724978465537357, + "acc_norm": 0.3839590443686007, + "acc_norm_stderr": 0.01421244498065189 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36964748058155744, + "acc_stderr": 0.004817227292240288, + "acc_norm": 0.4805815574586736, + "acc_norm_stderr": 0.00498601693867853 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.031410821975962386, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.031410821975962386 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419995, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419995 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 
0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.024915243985987844, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.024915243985987844 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.028100964724272638, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.028100964724272638 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.03056159042673183, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5024875621890548, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.5024875621890548, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.02357760479165582, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.02357760479165582 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548594, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548594 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282532 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.027002521034516475, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.027002521034516475 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46238532110091746, + "acc_stderr": 0.02137657527439757, + "acc_norm": 0.46238532110091746, + "acc_norm_stderr": 0.02137657527439757 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33169934640522875, + "acc_stderr": 0.01904748523936038, + "acc_norm": 0.33169934640522875, + "acc_norm_stderr": 0.01904748523936038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042405, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915206, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915206 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653062, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653062 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3005586592178771, + "acc_stderr": 0.015334566806251166, + "acc_norm": 0.3005586592178771, + "acc_norm_stderr": 0.015334566806251166 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.02833295951403122, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.02833295951403122 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163906, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48523206751054854, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.48523206751054854, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3109517601043025, + "acc_stderr": 0.011822252917799203, + "acc_norm": 0.3109517601043025, + "acc_norm_stderr": 0.011822252917799203 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.032566854844603886, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.032566854844603886 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3402692778457772, + "mc1_stderr": 0.016586304901762557, + "mc2": 0.5212110856919681, + "mc2_stderr": 0.015656809915743582 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40731995277449823, + "acc_stderr": 0.01689245669519127, + "acc_norm": 0.4604486422668241, + "acc_norm_stderr": 0.017136487626049853 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tmdduq/komt-mistral-7b-v1-dpo-osy-v1", + "model_sha": "fe2121c7d75e73671e1f995dea7728ba8b6a4588", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vihangd/smartyplats-7b-v1/result_2023-10-21 10:56:03.json b/vihangd/smartyplats-7b-v1/result_2023-10-21 10:56:03.json new file mode 100644 index 0000000000000000000000000000000000000000..a463a2c6d5fa18ee83c23ab87d63786bad812351 --- /dev/null +++ b/vihangd/smartyplats-7b-v1/result_2023-10-21 10:56:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2781569965870307, + "acc_stderr": 0.0130944699195388, + "acc_norm": 0.30631399317406144, + "acc_norm_stderr": 0.013470584417276511 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33320055765783707, + "acc_stderr": 0.004703942346762255, + "acc_norm": 0.3875721967735511, + "acc_norm_stderr": 0.004862003566798538 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.01781438523853443, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.01781438523853443 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + 
"acc": 0.43434343434343436, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.43434343434343436, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748842, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748842 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911522, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911522 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507383, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507383 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752045, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 
0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43853211009174314, + "acc_stderr": 0.021274713073954562, + "acc_norm": 0.43853211009174314, + "acc_norm_stderr": 0.021274713073954562 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786696, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786696 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878648, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878648 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2994413407821229, + "acc_stderr": 0.015318257745976708, + "acc_norm": 0.2994413407821229, + "acc_norm_stderr": 0.015318257745976708 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 
0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396563, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396563 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.03241920684693333, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.03241920684693333 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3376792698826597, + "acc_stderr": 0.012078563777145546, + "acc_norm": 0.3376792698826597, + "acc_norm_stderr": 0.012078563777145546 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.033540924375915195, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.033540924375915195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.45442787164664084, + "mc2_stderr": 0.016775457950621752 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4037780401416765, + "acc_stderr": 0.01686903154029863, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.01716156394991635 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vihangd/smartyplats-7b-v1", + "model_sha": "f9180b83a6e2051c5780d2ad336278226a3d425d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wkshin89/mistral-7b-instruct-ko-test-v0.1/result_2023-12-29 07:27:35.json b/wkshin89/mistral-7b-instruct-ko-test-v0.1/result_2023-12-29 07:27:35.json new file mode 100644 index 0000000000000000000000000000000000000000..39193de3e196dec34db2d14918c47149e0691735 --- /dev/null +++ b/wkshin89/mistral-7b-instruct-ko-test-v0.1/result_2023-12-29 07:27:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.0139289334613825, + "acc_norm": 0.38822525597269625, + "acc_norm_stderr": 0.01424161420741404 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37442740489942244, + "acc_stderr": 0.004829856058603582, + "acc_norm": 0.48894642501493724, + "acc_norm_stderr": 0.00498856194427739 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4763729246487867, + "acc_stderr": 0.01785998976517645, + "acc_norm": 0.4763729246487867, + "acc_norm_stderr": 0.01785998976517645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + 
"acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.03175367846096626, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.03175367846096626 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883231, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883231 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.02987257770889118, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.02987257770889118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009812, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009812 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 
0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752045, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666654, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666654 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070434, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070434 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44954128440366975, + "acc_stderr": 0.021327881417823377, + "acc_norm": 0.44954128440366975, + "acc_norm_stderr": 0.021327881417823377 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.02838425670488304, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.02838425670488304 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981749, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981749 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 
0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.027971541370170598, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.027971541370170598 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.03210353032241268, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.03210353032241268 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3435462842242503, + "acc_stderr": 0.012128961174190156, + "acc_norm": 0.3435462842242503, + "acc_norm_stderr": 0.012128961174190156 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361005, + "mc2": 0.4595701455440173, + "mc2_stderr": 0.015464603150764228 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38016528925619836, + "acc_stderr": 0.016689333596980122, + "acc_norm": 0.4474616292798111, + "acc_norm_stderr": 0.017095190301500585 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wkshin89/mistral-7b-instruct-ko-test-v0.1", + "model_sha": "6c4c20b04e67dc4f0aa797b28ecf0f9a213370c3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wngkdud/llama2_DPO_test_v1/result_2023-11-20 08:26:17.json b/wngkdud/llama2_DPO_test_v1/result_2023-11-20 08:26:17.json new file mode 100644 index 0000000000000000000000000000000000000000..b96087c1d333215982a228b2096e0f3e4705d743 --- /dev/null +++ b/wngkdud/llama2_DPO_test_v1/result_2023-11-20 08:26:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3319112627986348, + "acc_stderr": 0.013760988200880534, + "acc_norm": 0.38822525597269625, + "acc_norm_stderr": 0.014241614207414047 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4018123879705238, + "acc_stderr": 0.004892624490937213, + "acc_norm": 0.5252937661820355, + "acc_norm_stderr": 0.004983392650570966 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.048257293373563895, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.048257293373563895 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.01781040392543536, + "acc_norm": 0.4559386973180077, + "acc_norm_stderr": 0.01781040392543536 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339525, + "acc_norm": 
0.3872340425531915, + "acc_norm_stderr": 0.03184389265339525 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562783, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562783 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123005, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123005 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003337, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003337 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3067226890756303, + "acc_stderr": 0.029953823891887055, + "acc_norm": 0.3067226890756303, + "acc_norm_stderr": 0.029953823891887055 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33589743589743587, + "acc_stderr": 0.02394672474156397, + "acc_norm": 0.33589743589743587, + "acc_norm_stderr": 0.02394672474156397 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.0332085274234831, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.0332085274234831 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.02737987122994325, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.02737987122994325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37358490566037733, + "acc_stderr": 0.029773082713319878, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.029773082713319878 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + 
"acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.03479185572599661, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.03479185572599661 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4577114427860697, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.4577114427860697, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577657, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577657 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.02642481659400985, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.02642481659400985 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456024, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39896373056994816, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.39896373056994816, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481425, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481425 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362237, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362237 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.0273053080762747, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.0273053080762747 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.045517111961042175, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.045517111961042175 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03782728980865469, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03782728980865469 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223974, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223974 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605603, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605603 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.02916273841024978, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.02916273841024978 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4472573839662447, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.4472573839662447, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28748370273794005, + "acc_stderr": 0.011559337355708512, + "acc_norm": 0.28748370273794005, + "acc_norm_stderr": 0.011559337355708512 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882455, + "mc2": 0.48120300528886556, + "mc2_stderr": 0.016615872220447157 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4344746162927981, + "acc_stderr": 0.017042098620824935, + "acc_norm": 0.448642266824085, + "acc_norm_stderr": 0.017099430514725792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wngkdud/llama2_DPO_test_v1", + "model_sha": "69af6cea4762a14fddbeeddb7f42375bd9a81181", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wngkdud/llama2_koen_13b_SFTtrain/result_2023-11-22 07:45:54.json b/wngkdud/llama2_koen_13b_SFTtrain/result_2023-11-22 07:45:54.json new file mode 100644 index 0000000000000000000000000000000000000000..9a645235fe191ab361d8c389da205c79e4aa04de --- /dev/null +++ b/wngkdud/llama2_koen_13b_SFTtrain/result_2023-11-22 07:45:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33532423208191126, + "acc_stderr": 0.013796182947785564, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693026 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38856801433977295, + "acc_stderr": 0.00486428617673183, + "acc_norm": 0.5029874526986656, + "acc_norm_stderr": 0.0049896923443139935 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48659003831417624, + "acc_stderr": 
0.017873531736510392, + "acc_norm": 0.48659003831417624, + "acc_norm_stderr": 0.017873531736510392 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105908, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105908 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828065, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828065 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309993, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309993 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.0312821770636846, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.0312821770636846 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.024784316942156367, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.024784316942156367 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575494, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575494 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.027906150826041143, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.027906150826041143 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + 
"acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.03005258057955785, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.03005258057955785 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145644, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145644 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165582, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165582 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.026817718130348916, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.026817718130348916 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518752, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518752 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214338, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.027684181883302895, + "acc_norm": 0.37254901960784315, + 
"acc_norm_stderr": 0.027684181883302895 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396573, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396573 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.510548523206751, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.510548523206751, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2926988265971317, + "acc_stderr": 0.011620949195849535, + "acc_norm": 0.2926988265971317, + "acc_norm_stderr": 0.011620949195849535 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719128, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719128 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431855, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431855 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4530197496571, + "mc2_stderr": 0.01640666508928725 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4155844155844156, + "acc_stderr": 0.01694358631307657, + "acc_norm": 0.45218417945690675, + "acc_norm_stderr": 0.017111567130916782 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wngkdud/llama2_koen_13b_SFTtrain", + "model_sha": "3d446bd9c006d91347daa92a69c6a876506c39bc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wons/Yi-6b-test-v0.1/result_2023-12-02 10:17:46.json b/wons/Yi-6b-test-v0.1/result_2023-12-02 10:17:46.json new file mode 100644 index 0000000000000000000000000000000000000000..8441523b3b6fb505df1f2b99a98cdcc3b634c869 --- /dev/null +++ b/wons/Yi-6b-test-v0.1/result_2023-12-02 10:17:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497723, + "acc_norm": 0.4104095563139932, + "acc_norm_stderr": 0.014374922192642666 + }, + "harness|ko_hellaswag|10": { + "acc": 0.398725353515236, + "acc_stderr": 0.004886353563571851, + "acc_norm": 0.5297749452300339, + "acc_norm_stderr": 
0.004980926198798982 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 0.017844918090468547, + "acc_norm": 0.5312899106002554, + "acc_norm_stderr": 0.017844918090468547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.032321469162244695, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.032321469162244695 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758396, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758396 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736118, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736118 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748842, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748842 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547307, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547307 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369818, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369818 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.03555300319557669, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.03555300319557669 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6146788990825688, 
+ "acc_stderr": 0.02086585085279411, + "acc_norm": 0.6146788990825688, + "acc_norm_stderr": 0.02086585085279411 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.02856869975222588 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829163, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829163 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293646, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293646 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452229, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452229 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 
0.015368841620766372, + "mc2": 0.4097167044022003, + "mc2_stderr": 0.014779447855728677 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5242030696576151, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5844155844155844, + "acc_norm_stderr": 0.01694358631307656 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/Yi-6b-test-v0.1", + "model_sha": "6d9f3acc8e9e02f0dc1457cfa67bf8f721850a63", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wons/llama2-13b-dpo-test-v0.2/result_2023-11-29 14:42:30.json b/wons/llama2-13b-dpo-test-v0.2/result_2023-11-29 14:42:30.json new file mode 100644 index 0000000000000000000000000000000000000000..928a0fb984d66c0f7019639f24d5ad9cd438a61a --- /dev/null +++ 
b/wons/llama2-13b-dpo-test-v0.2/result_2023-11-29 14:42:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3302047781569966, + "acc_stderr": 0.013743085603760427, + "acc_norm": 0.3796928327645051, + "acc_norm_stderr": 0.014182119866974872 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39852619000199163, + "acc_stderr": 0.0048859420408945585, + "acc_norm": 0.5236008763194583, + "acc_norm_stderr": 0.004984219681732655 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5057471264367817, + "acc_stderr": 0.017878782326129234, + "acc_norm": 0.5057471264367817, + "acc_norm_stderr": 0.017878782326129234 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231008, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231008 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41919191919191917, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.41919191919191917, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03960933549451207, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03960933549451207 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.031753678460966245, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296542, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296542 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + 
"acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165897, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165897 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02534809746809784, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02534809746809784 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.03522865864099597, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.03522865864099597 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.40173410404624277, + "acc_stderr": 0.02639410417764363, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.02639410417764363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283683, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283683 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35130718954248363, + "acc_stderr": 0.01931267606578657, + "acc_norm": 0.35130718954248363, + "acc_norm_stderr": 0.01931267606578657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369923, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369923 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.49264705882352944, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.49264705882352944, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.0315123604467428, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.0315123604467428 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4978902953586498, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.4978902953586498, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3044328552803129, + "acc_stderr": 0.011752877592597577, + "acc_norm": 0.3044328552803129, + "acc_norm_stderr": 
0.011752877592597577 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.033933885849584046, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.033933885849584046 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.37576499388004897, + "mc1_stderr": 0.016954584060214297, + "mc2": 0.5484649933900534, + "mc2_stderr": 0.015659278554917617 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6115702479338843, + "acc_stderr": 0.016756921571069422, + "acc_norm": 0.6458087367178277, + "acc_norm_stderr": 0.01644317574921476 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/llama2-13b-dpo-test-v0.2", + 
"model_sha": "05fff3bca94d8e55146c1aef68557eabb41e2051", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wons/llama2-13b-test-v0.1/result_2023-11-22 07:53:03.json b/wons/llama2-13b-test-v0.1/result_2023-11-22 07:53:03.json new file mode 100644 index 0000000000000000000000000000000000000000..1bcfde4b77becad0f8a3a397feb09c4decdf6152 --- /dev/null +++ b/wons/llama2-13b-test-v0.1/result_2023-11-22 07:53:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37457337883959047, + "acc_stderr": 0.014144193471893456, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 0.014553749939306866 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42202748456482775, + "acc_stderr": 0.004928735103635848, + "acc_norm": 0.5662218681537542, + "acc_norm_stderr": 0.00494582405650181 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5300127713920817, + "acc_stderr": 0.017847723086649083, + "acc_norm": 0.5300127713920817, + "acc_norm_stderr": 0.017847723086649083 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111288, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.028396770444111288 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102308, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102308 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406795, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406795 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4129032258064516, + "acc_stderr": 0.02800913812540039, + "acc_norm": 0.4129032258064516, + "acc_norm_stderr": 0.02800913812540039 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, 
+ "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288441, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288441 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.02143295620345333, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.02143295620345333 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.02830457667314112, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.02830457667314112 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022135 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828978, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828978 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 
0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.032335327775334835, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3070404172099087, + "acc_stderr": 0.011780959114513778, + "acc_norm": 0.3070404172099087, + "acc_norm_stderr": 0.011780959114513778 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.42332436951734187, + "mc2_stderr": 0.014852154991640701 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998567, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.017090852631668336 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/llama2-13b-test-v0.1", + "model_sha": "7d81f655a9450c5b65eeeb3126373d7e08e8186f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wons/mistral-7B-test-v0.1/result_2023-11-22 14:04:32.json b/wons/mistral-7B-test-v0.1/result_2023-11-22 14:04:32.json new file mode 100644 index 0000000000000000000000000000000000000000..ea7506cbd198e674c3ed8aa7da23ef27d0bd2e0e --- /dev/null +++ b/wons/mistral-7B-test-v0.1/result_2023-11-22 14:04:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145687, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3843855805616411, + "acc_stderr": 0.004854555294017561, + "acc_norm": 0.4870543716391157, + "acc_norm_stderr": 0.004988108663179765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 0.01787924897058439, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.01787924897058439 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.041716541613545426, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.041716541613545426 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357794, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357794 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 
0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5462184873949579, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.5462184873949579, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534795, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534795 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749465, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.025075981767601684, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.025075981767601684 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, 
+ "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024113, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024113 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199506, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199506 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3128491620111732, + "acc_stderr": 0.015506892594647267, + "acc_norm": 0.3128491620111732, + "acc_norm_stderr": 0.015506892594647267 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 
0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.012002091666902312, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.012002091666902312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.01565960575532691, + "mc2": 0.45950713329073445, + "mc2_stderr": 0.015528962534409833 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5029515938606848, + "acc_norm_stderr": 0.017190054580194694 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/mistral-7B-test-v0.1", + "model_sha": "ad71e129d126732f7c2f08bdbf88cdfab5866e45", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wons/mistral-7B-test-v0.2/result_2023-11-29 03:56:01.json b/wons/mistral-7B-test-v0.2/result_2023-11-29 03:56:01.json new file mode 100644 index 0000000000000000000000000000000000000000..0f6cc40378385337ae49065c02cad8011db15b78 --- /dev/null +++ b/wons/mistral-7B-test-v0.2/result_2023-11-29 03:56:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.01384746051889298, + "acc_norm": 0.3703071672354949, + "acc_norm_stderr": 0.01411129875167495 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3637721569408484, + "acc_stderr": 0.004801009657690444, + "acc_norm": 0.4645488946425015, + "acc_norm_stderr": 0.0049772234853420255 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041975, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041975 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.028150232244535597, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.028150232244535597 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.0253480060315348, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.0253480060315348 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.02828632407556441, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.02828632407556441 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083004, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083004 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.037242495958177295, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699947, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699947 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 0.021436420955529424, + "acc_norm": 0.4954128440366973, + "acc_norm_stderr": 0.021436420955529424 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412236, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412236 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + 
"acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.01485499393801009, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.01485499393801009 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214933, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214933 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.037818873532059816, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.037818873532059816 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.016185744355144898, + "mc2": 0.4777339871786822, + "mc2_stderr": 0.015453835300523385 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4037780401416765, + "acc_stderr": 0.01686903154029863, + "acc_norm": 0.4332939787485242, + "acc_norm_stderr": 0.017036683641893098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/mistral-7B-test-v0.2", + "model_sha": "fa2a9ef5ec5670fa4bb3f590f1d08995ea498d24", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wons/mistral-7B-test-v0.3/result_2023-11-29 09:06:39.json b/wons/mistral-7B-test-v0.3/result_2023-11-29 09:06:39.json new file mode 100644 index 0000000000000000000000000000000000000000..5dc097d3766937d4c882fc5f6de2fdb25cb2c184 --- /dev/null +++ b/wons/mistral-7B-test-v0.3/result_2023-11-29 09:06:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32849829351535836, + "acc_stderr": 0.013724978465537368, + "acc_norm": 0.378839590443686, + "acc_norm_stderr": 0.014175915490000322 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35929097789285, + "acc_stderr": 0.004788120727316244, + "acc_norm": 0.4631547500497909, + "acc_norm_stderr": 0.0049762149894835035 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693353, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + 
"acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894248, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894248 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.0355580405176393, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.0355580405176393 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376536, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376536 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.02832774309156106, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.02832774309156106 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.03047144586718323, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.03047144586718323 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465066, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465066 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 
0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651281, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651281 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539284, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539284 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362227, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362227 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.02856869975222588 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486635, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 
0.01952431674486635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364555, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364555 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.029674288281311172, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311172 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.031843998738112236, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.031843998738112236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34810951760104303, + "acc_stderr": 0.012166738993698191, + "acc_norm": 0.34810951760104303, + "acc_norm_stderr": 0.012166738993698191 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33047735618115054, + "mc1_stderr": 0.016466769613698293, + "mc2": 0.5077406474283724, + "mc2_stderr": 0.015633659057840248 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3990554899645809, + "acc_stderr": 0.016836377292849296, + "acc_norm": 0.4474616292798111, + "acc_norm_stderr": 0.01709519030150058 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/mistral-7B-test-v0.3", + "model_sha": "899f2f796d3cb956b29a0a6a7463a912bd6f8367", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wons/tigerbot-13b-test-v0_1/result_2023-12-01 04:57:49.json b/wons/tigerbot-13b-test-v0_1/result_2023-12-01 04:57:49.json new file mode 100644 index 0000000000000000000000000000000000000000..ec6b127a54cbf02089b7d105abbf8d367bf5ed5c --- /dev/null +++ b/wons/tigerbot-13b-test-v0_1/result_2023-12-01 04:57:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24744027303754265, + "acc_stderr": 0.01261035266329267, + "acc_norm": 0.30119453924914674, + "acc_norm_stderr": 0.013406741767847626 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32085241983668594, + "acc_stderr": 0.0046585016622776206, + "acc_norm": 0.3835889265086636, + "acc_norm_stderr": 0.0048526588767753825 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.036310534964889056, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.036310534964889056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3933588761174968, + "acc_stderr": 0.01746855672450314, + "acc_norm": 0.3933588761174968, + "acc_norm_stderr": 0.01746855672450314 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.041716541613545426, + "acc_norm": 
0.37037037037037035, + "acc_norm_stderr": 0.041716541613545426 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36012861736334406, + "acc_stderr": 0.027264297599804015, + "acc_norm": 0.36012861736334406, + "acc_norm_stderr": 0.027264297599804015 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.32286995515695066, + "acc_stderr": 0.031381476375754995, + "acc_norm": 0.32286995515695066, + "acc_norm_stderr": 0.031381476375754995 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016339, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.03464881675016339 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185555, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185555 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.036186648199362466, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.036186648199362466 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3967741935483871, + "acc_stderr": 0.027831231605767948, + "acc_norm": 0.3967741935483871, + "acc_norm_stderr": 0.027831231605767948 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5170940170940171, + "acc_stderr": 0.032736940493481824, + "acc_norm": 0.5170940170940171, + "acc_norm_stderr": 0.032736940493481824 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3471698113207547, + "acc_stderr": 0.02930010170554966, + "acc_norm": 0.3471698113207547, + "acc_norm_stderr": 0.02930010170554966 + 
}, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3901734104046243, + "acc_stderr": 0.026261677607806653, + "acc_norm": 0.3901734104046243, + "acc_norm_stderr": 0.026261677607806653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470022, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470022 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.027339546640662737, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.027339546640662737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.03567471335212541, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.03567471335212541 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583639, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583639 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3541284403669725, + "acc_stderr": 0.020504729013829107, + "acc_norm": 0.3541284403669725, + "acc_norm_stderr": 0.020504729013829107 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.027684181883302895, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.027684181883302895 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + 
"acc_stderr": 0.04465869780531009, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.01846315413263281, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.01846315413263281 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320207, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320207 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210746, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210746 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.024398192986654924, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.024398192986654924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460295, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460295 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2561929595827901, + "acc_stderr": 0.01114917315311058, + "acc_norm": 0.2561929595827901, + "acc_norm_stderr": 0.01114917315311058 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.032250781083062896, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.032250781083062896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.016322644182960498, + "mc2": 0.4847809791543606, + "mc2_stderr": 0.015949221320086037 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346463, + "acc_norm": 0.4132231404958678, + "acc_norm_stderr": 0.016929480234495226 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/tigerbot-13b-test-v0_1", + "model_sha": "17a0e2d598004af5f685811bc1ef9ee980e56ee6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yeen214/llama2_7b_small_tuning_v1/result_2023-10-03 06:58:13.json b/yeen214/llama2_7b_small_tuning_v1/result_2023-10-03 06:58:13.json new file mode 100644 index 0000000000000000000000000000000000000000..2169cf6e3f6ce61affa2400bcef932950ebb446e --- /dev/null +++ b/yeen214/llama2_7b_small_tuning_v1/result_2023-10-03 06:58:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20563139931740615, + "acc_stderr": 0.011810745260742585, + "acc_norm": 0.25853242320819114, + "acc_norm_stderr": 0.012794553754288666 + }, + "harness|ko_hellaswag|10": { + "acc": 0.252141007767377, + "acc_stderr": 0.004333543083293473, + "acc_norm": 0.24278032264489147, + "acc_norm_stderr": 0.004278871104930363 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2720306513409962, + "acc_stderr": 0.015913367447500517, + "acc_norm": 0.2720306513409962, + "acc_norm_stderr": 0.015913367447500517 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039787, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039787 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663925, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663925 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 0.026236965881153266, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.026236965881153266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.026936111912802263, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.026936111912802263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124495, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124495 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.038061426873099935, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.038061426873099935 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376556, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376556 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.18067226890756302, + "acc_stderr": 0.024991964966600756, + "acc_norm": 0.18067226890756302, + "acc_norm_stderr": 0.024991964966600756 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.022421273612923714, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.022421273612923714 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642749, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642749 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.22258064516129034, + "acc_stderr": 0.02366421667164252, + "acc_norm": 0.22258064516129034, + "acc_norm_stderr": 0.02366421667164252 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24786324786324787, + "acc_stderr": 0.028286324075564386, + "acc_norm": 0.24786324786324787, + "acc_norm_stderr": 0.028286324075564386 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.03076944496729602, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729602 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.022019080012217897, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.022019080012217897 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.024027745155265026, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.024027745155265026 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294688, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294688 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.02925282329180363, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.02925282329180363 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.20550458715596331, + "acc_stderr": 0.01732435232501601, + "acc_norm": 0.20550458715596331, + "acc_norm_stderr": 0.01732435232501601 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.024288619466046095, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.024288619466046095 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932267, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932267 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3140495867768595, + "acc_stderr": 0.04236964753041019, + "acc_norm": 0.3140495867768595, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998905, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.01815287105153882, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.01815287105153882 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.02635806569888059, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.02635806569888059 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.028353212866863445, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.028353212866863445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098426, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098426 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1875, + "acc_stderr": 0.023709788253811766, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.023709788253811766 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.026537045312145294, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.026537045312145294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.01134599674353926, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.01134599674353926 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.03182231867647554, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.03182231867647554 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.21909424724602203, + "mc1_stderr": 0.014480038578757447, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08264462809917356, + "acc_stderr": 0.009466532659719994, + "acc_norm": 
0.21959858323494688, + "acc_norm_stderr": 0.014232743085580271 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yeen214/llama2_7b_small_tuning_v1", + "model_sha": "3f9b43b4db2da4fe3785071dd52c9fc92aa0801d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yeen214/test_llama2_7b/result_2023-10-01 02:38:38.json b/yeen214/test_llama2_7b/result_2023-10-01 02:38:38.json new file mode 100644 index 0000000000000000000000000000000000000000..a5cf8e5dbe99c26910f9bf1098e79b741799abbb --- /dev/null +++ b/yeen214/test_llama2_7b/result_2023-10-01 02:38:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2713310580204778, + "acc_stderr": 0.012993807727545787, + "acc_norm": 
0.310580204778157, + "acc_norm_stderr": 0.013522292098053055 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3331009759012149, + "acc_stderr": 0.004703590558552501, + "acc_norm": 0.41127265484963155, + "acc_norm_stderr": 0.004910588449330016 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36909323116219667, + "acc_stderr": 0.017256283109124613, + "acc_norm": 0.36909323116219667, + "acc_norm_stderr": 0.017256283109124613 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785138, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785138 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071857, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071857 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.02666441088693762, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.02666441088693762 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416545, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416545 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886845, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886845 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30512820512820515, + "acc_stderr": 0.023346335293325887, + "acc_norm": 0.30512820512820515, + "acc_norm_stderr": 0.023346335293325887 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.32903225806451614, + "acc_stderr": 0.02672949906834997, + "acc_norm": 0.32903225806451614, + "acc_norm_stderr": 0.02672949906834997 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.49145299145299143, + "acc_stderr": 0.032751303000970296, + "acc_norm": 0.49145299145299143, + "acc_norm_stderr": 0.032751303000970296 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.028254200344438676, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.028254200344438676 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505415, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505415 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.033367670865679766, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.033367670865679766 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4577114427860697, + "acc_stderr": 0.03522865864099597, + "acc_norm": 0.4577114427860697, + "acc_norm_stderr": 0.03522865864099597 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37283236994219654, + "acc_stderr": 0.026033890613576288, + "acc_norm": 0.37283236994219654, + "acc_norm_stderr": 0.026033890613576288 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664742, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664742 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.36728395061728397, + "acc_stderr": 0.02682280175950789, + "acc_norm": 0.36728395061728397, + "acc_norm_stderr": 0.02682280175950789 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.03458816042181007, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.03458816042181007 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28807339449541286, + "acc_stderr": 0.019416445892636018, + "acc_norm": 0.28807339449541286, + "acc_norm_stderr": 0.019416445892636018 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283683, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283683 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351586, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351586 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.018690850273595284, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.018690850273595284 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.025336848563332386, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.025336848563332386 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301854, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.031219569445301854 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2685788787483703, + "acc_stderr": 0.011320056629121734, + "acc_norm": 0.2685788787483703, + "acc_norm_stderr": 0.011320056629121734 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.03242661719827218 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.036974422050315967, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.036974422050315967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006516, + "mc2": 0.4392204501367092, + "mc2_stderr": 0.01533439619345391 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27036599763872493, + "acc_stderr": 0.015270152942068405, + "acc_norm": 0.3530106257378985, + "acc_norm_stderr": 0.016430745982427126 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yeen214/test_llama2_7b", + "model_sha": "69a4886f51ed752216cdd7f41a584d14240126f9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at 
end of file diff --git a/yeen214/test_llama2_ko_7b/result_2023-10-03 06:58:46.json b/yeen214/test_llama2_ko_7b/result_2023-10-03 06:58:46.json new file mode 100644 index 0000000000000000000000000000000000000000..78fb38cbd290a2d6d0a9bd8dbe1fa5f1178b9fee --- /dev/null +++ b/yeen214/test_llama2_ko_7b/result_2023-10-03 06:58:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21416382252559726, + "acc_stderr": 0.011988383205966496, + "acc_norm": 0.257679180887372, + "acc_norm_stderr": 0.012780770562768409 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2524397530372436, + "acc_stderr": 0.004335243434486834, + "acc_norm": 0.25323640709022105, + "acc_norm_stderr": 0.004339764434219064 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393163, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393163 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24265644955300128, + "acc_stderr": 0.015329888940899894, + "acc_norm": 0.24265644955300128, + "acc_norm_stderr": 0.015329888940899894 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.03915450630414251, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.03915450630414251 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2170212765957447, + "acc_stderr": 0.026947483121496228, + "acc_norm": 0.2170212765957447, + "acc_norm_stderr": 0.026947483121496228 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.025583062489984824, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.025583062489984824 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21973094170403587, + "acc_stderr": 0.027790177064383602, + "acc_norm": 0.21973094170403587, + "acc_norm_stderr": 0.027790177064383602 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.20610687022900764, + "acc_stderr": 0.03547771004159462, + "acc_norm": 0.20610687022900764, + "acc_norm_stderr": 0.03547771004159462 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365914, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365914 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438014, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438014 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882392, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882392 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": 
{ + "acc": 0.2564102564102564, + "acc_stderr": 0.022139081103971545, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.022139081103971545 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854932, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22167487684729065, + "acc_stderr": 0.029225575892489614, + "acc_norm": 0.22167487684729065, + "acc_norm_stderr": 0.029225575892489614 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.02468597928623996, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.02468597928623996 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.029343114798094472, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.029343114798094472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.026480357179895702, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.026480357179895702 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.17272727272727273, + "acc_stderr": 0.03620691833929219, + "acc_norm": 0.17272727272727273, + "acc_norm_stderr": 0.03620691833929219 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834838, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834838 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.03368762932259431, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.03368762932259431 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02141168439369418, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02141168439369418 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.02344582627654554, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.02344582627654554 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.02447722285613511, + 
"acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.02447722285613511 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21834862385321102, + "acc_stderr": 0.017712600528722727, + "acc_norm": 0.21834862385321102, + "acc_norm_stderr": 0.017712600528722727 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906045, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906045 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.023929155517351298, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.023929155517351298 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302871, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302871 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2630718954248366, + "acc_stderr": 0.017812676542320657, + "acc_norm": 0.2630718954248366, + "acc_norm_stderr": 0.017812676542320657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090203, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090203 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298825, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298825 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369916, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369916 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.024231013370541087, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.024231013370541087 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.02768297952296023, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.02768297952296023 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.029571601065753374, + "acc_norm": 
0.2911392405063291, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25358539765319427, + "acc_stderr": 0.011111715336101138, + "acc_norm": 0.25358539765319427, + "acc_norm_stderr": 0.011111715336101138 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871112, + "mc2": 0.49817574202268433, + "mc2_stderr": 0.016860322660870557 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09208972845336481, + "acc_stderr": 0.009941270233798432, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.016068253615813967 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yeen214/test_llama2_ko_7b", + "model_sha": "45901e1d6ccb22f5ed8aec3f9dd366823fdd1c33", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yeheun/llama-2-koen-13b-v1.2/result_2023-11-09 07:16:51.json b/yeheun/llama-2-koen-13b-v1.2/result_2023-11-09 07:16:51.json new file mode 100644 index 0000000000000000000000000000000000000000..70c77832393dda48c59e6f24f31276c257dbfc2f --- /dev/null +++ b/yeheun/llama-2-koen-13b-v1.2/result_2023-11-09 07:16:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3890784982935154, + "acc_stderr": 0.014247309976045607, + "acc_norm": 0.45819112627986347, + "acc_norm_stderr": 0.014560220308714702 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4206333399721171, + "acc_stderr": 0.004926518439372259, + "acc_norm": 0.5676160127464649, + "acc_norm_stderr": 0.004943945069611462 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.043182754919779756, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.043182754919779756 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 
0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280458, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5811965811965812, + "acc_stderr": 0.03232128912157792, + "acc_norm": 0.5811965811965812, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 
0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138938, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138938 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.021436420955529424, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.021436420955529424 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.01943177567703731, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.01943177567703731 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859672, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859672 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.011759939618085455, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085455 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766368, + "mc2": 0.4100851120970672, + "mc2_stderr": 0.014797143070922393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5029515938606848, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.5914994096812278, + "acc_norm_stderr": 0.016900062879427125 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 
1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yeheun/llama-2-koen-13b-v1.2", + "model_sha": "cb9e8ff37d427ab588d666b5c6994498a10084de", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yeheun/llama-2-koen-13b-v1.3/result_2023-11-09 19:30:39.json b/yeheun/llama-2-koen-13b-v1.3/result_2023-11-09 19:30:39.json new file mode 100644 index 0000000000000000000000000000000000000000..fdd5d53de94942b453250786e36356cb68919f62 --- /dev/null +++ b/yeheun/llama-2-koen-13b-v1.3/result_2023-11-09 19:30:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3728668941979522, + "acc_stderr": 0.014131176760131176, + "acc_norm": 0.42406143344709896, + "acc_norm_stderr": 0.014441889627464398 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39852619000199163, + "acc_stderr": 0.004885942040894558, + "acc_norm": 0.5293766182035451, + "acc_norm_stderr": 0.004981161746388229 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.017870847506081734, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.017870847506081734 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.03348180017060306, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.03348180017060306 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41414141414141414, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.41414141414141414, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938156, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938156 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165897, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165897 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641087, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641087 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776285, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776285 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.02708037281514566, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.02708037281514566 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.037143259063020635, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.037143259063020635 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + 
"acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413324, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4055045871559633, + "acc_stderr": 0.021050997991896834, + "acc_norm": 0.4055045871559633, + "acc_norm_stderr": 0.021050997991896834 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424513, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424513 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.018950886770806315, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.018950886770806315 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150381, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150381 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456052, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456052 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 
0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335314, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163908, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163908 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4767932489451477, + "acc_stderr": 0.03251215201141018, + "acc_norm": 0.4767932489451477, + "acc_norm_stderr": 0.03251215201141018 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30378096479791394, + "acc_stderr": 0.011745787720472462, + "acc_norm": 0.30378096479791394, + "acc_norm_stderr": 0.011745787720472462 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.03364487286088299, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.03364487286088299 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22276621787025705, + "mc1_stderr": 0.014566506961396754, + "mc2": 0.38125667821834136, + "mc2_stderr": 0.015042897939213158 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38488783943329397, + "acc_stderr": 0.016728579701498672, + "acc_norm": 0.4675324675324675, + "acc_norm_stderr": 0.017154073716682858 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yeheun/llama-2-koen-13b-v1.3", + "model_sha": "690d4d31210518d018e6be9b2a099bb7b2c60af5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yuntaeyang/Llama-2-ko-instruct-13B-kor-orca-lora/result_2023-10-31 05:26:07.json b/yuntaeyang/Llama-2-ko-instruct-13B-kor-orca-lora/result_2023-10-31 05:26:07.json new file mode 100644 index 0000000000000000000000000000000000000000..08b940a1e390d9534fd23dd82557f58865f3076b --- /dev/null +++ b/yuntaeyang/Llama-2-ko-instruct-13B-kor-orca-lora/result_2023-10-31 05:26:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.014383915302225396, + "acc_norm": 0.4709897610921502, + "acc_norm_stderr": 0.014586776355294324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.424317864967138, + "acc_stderr": 0.004932289405608944, + "acc_norm": 0.5681139215295757, + "acc_norm_stderr": 0.0049432643398686525 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + 
"acc": 0.5369774919614148, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376556, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376556 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095495, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073835, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073835 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.02397386199899207, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.02397386199899207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.020748959408988334, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.020748959408988334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389178, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389178 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.01986115519382917, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.01986115519382917 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 
0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.02985526139348393, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.02985526139348393 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38005215123859193, + "acc_stderr": 0.012397328205137805, + "acc_norm": 0.38005215123859193, + "acc_norm_stderr": 0.012397328205137805 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842881, + "mc2": 0.4266761138294313, + "mc2_stderr": 0.014862773603692122 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45808736717827625, + "acc_stderr": 0.017129852117911147, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.017145715365486664 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yuntaeyang/Llama-2-ko-instruct-13B-kor-orca-lora", + "model_sha": "757d3a2c1ff1aa98b46727cf28922307e8a212ba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yuntaeyang/Orca-2-7b-lora-kor/result_2023-12-10 05:02:57.json b/yuntaeyang/Orca-2-7b-lora-kor/result_2023-12-10 05:02:57.json new file mode 100644 index 0000000000000000000000000000000000000000..c0ac7ebb6003c49568eff5c314f3b3afd7b80543 --- /dev/null +++ b/yuntaeyang/Orca-2-7b-lora-kor/result_2023-12-10 05:02:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2935153583617747, + "acc_stderr": 0.013307250444941124, + "acc_norm": 0.3447098976109215, + "acc_norm_stderr": 0.01388881628678211 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33031268671579367, + "acc_stderr": 0.0046936443572020495, + "acc_norm": 0.4053973312089225, + "acc_norm_stderr": 0.004899653704032836 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.40485312899106, + "acc_stderr": 0.017553246467720253, + "acc_norm": 0.40485312899106, + "acc_norm_stderr": 0.017553246467720253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 
0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611549, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611549 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.38263665594855306, + "acc_stderr": 0.027604689028581986, + "acc_norm": 0.38263665594855306, + "acc_norm_stderr": 0.027604689028581986 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330313, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330313 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.035476014940069356, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.035476014940069356 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.03175367846096625, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.03175367846096625 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.024243783994062185, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 0.024243783994062185 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.027430866579973474, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.027430866579973474 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3849056603773585, + "acc_stderr": 0.029946498567699948, + "acc_norm": 0.3849056603773585, + "acc_norm_stderr": 0.029946498567699948 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302506, + 
"acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302506 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112147, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112147 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.026720034380514998, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.037466683254700206, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.037466683254700206 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005135, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3798165137614679, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.3798165137614679, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.0282135041778241, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.0282135041778241 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775089, + "acc_norm": 0.5785123966942148, + 
"acc_norm_stderr": 0.04507732278775089 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3022875816993464, + "acc_stderr": 0.018579232711113877, + "acc_norm": 0.3022875816993464, + "acc_norm_stderr": 0.018579232711113877 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291517, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291517 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560524, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560524 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26256983240223464, + "acc_stderr": 0.014716824273017754, + "acc_norm": 0.26256983240223464, + "acc_norm_stderr": 0.014716824273017754 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144682, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144682 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32333767926988266, + "acc_stderr": 0.011946565758447202, + "acc_norm": 0.32333767926988266, + "acc_norm_stderr": 0.011946565758447202 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326912, + "mc2": 0.4497834339134106, + "mc2_stderr": 0.01565169575316926 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3482880755608028, + "acc_stderr": 0.016379926739148044, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.016819438642971408 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yuntaeyang/Orca-2-7b-lora-kor", + "model_sha": "2ab434d0f6cd093918fdab77ee7c24c2b40adffe", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yuntaeyang/Yi-Ko-6B-lora/result_2023-12-27 10:01:00.json b/yuntaeyang/Yi-Ko-6B-lora/result_2023-12-27 10:01:00.json new file mode 100644 index 0000000000000000000000000000000000000000..ee38c1a2fb46c6e72d00aed7d3af3152170dc588 --- /dev/null +++ b/yuntaeyang/Yi-Ko-6B-lora/result_2023-12-27 10:01:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3720136518771331, + "acc_stderr": 0.014124597881844465, + "acc_norm": 0.4121160409556314, + "acc_norm_stderr": 0.014383915302225402 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39294961163114916, + "acc_stderr": 0.004874076250521576, + "acc_norm": 0.5160326628161721, + "acc_norm_stderr": 0.004987215542259674 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 
0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.017769250583533253, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.017769250583533253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236784, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236784 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564584, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564584 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999935, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999935 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534795, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534795 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.034524539038220385, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.034524539038220385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + 
"acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.02441923496681907, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.02441923496681907 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303128, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.636697247706422, + "acc_stderr": 0.020620603919625804, + "acc_norm": 0.636697247706422, + "acc_norm_stderr": 0.020620603919625804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.02858034106513829, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.02858034106513829 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.019886221037501876, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.019886221037501876 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787317, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787317 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29608938547486036, + "acc_stderr": 0.01526867731760228, + "acc_norm": 0.29608938547486036, + "acc_norm_stderr": 0.01526867731760228 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214938, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214938 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.038435669935887165, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.038435669935887165 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.01605899902610062, + "mc2": 0.44032674532682686, + "mc2_stderr": 0.015044649657068107 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5348288075560803, + "acc_stderr": 0.017148598015747422, + "acc_norm": 0.6068476977567887, + "acc_norm_stderr": 0.016793262801287075 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yuntaeyang/Yi-Ko-6B-lora", + "model_sha": "faf11c288d664426c5592f32741ff49ae20667c4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yuntaeyang/lion-7b-lora-kor/result_2023-12-15 04:03:18.json b/yuntaeyang/lion-7b-lora-kor/result_2023-12-15 04:03:18.json new file mode 100644 index 0000000000000000000000000000000000000000..0ee00b6c5910c5e0344869d5b027180733face8e --- /dev/null +++ b/yuntaeyang/lion-7b-lora-kor/result_2023-12-15 04:03:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2158703071672355, + "acc_stderr": 0.012022975360030662, + "acc_norm": 0.27559726962457337, + "acc_norm_stderr": 0.013057169655761834 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.2523401712806214, + "acc_stderr": 0.004334676952703859, + "acc_norm": 0.24297948615813583, + "acc_norm_stderr": 0.004280062838446546 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 
0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02860595370200424, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803627, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803627 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 
0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.031546980450822305, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.031546980450822305 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612378984, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612378984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.01476194517486267, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08500590318772137, + "acc_stderr": 0.009588452201257215, + "acc_norm": 0.21251475796930341, + "acc_norm_stderr": 0.014064703386174932 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yuntaeyang/lion-7b-lora-kor", + "model_sha": "e28cf28b0967a027f504fa87645616dcae67d502", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/zomd/AISquare-Instruct-llama2-koen-13b-v0.9.17/result_2023-12-06 13:30:05.json b/zomd/AISquare-Instruct-llama2-koen-13b-v0.9.17/result_2023-12-06 13:30:05.json new file mode 100644 index 0000000000000000000000000000000000000000..0d54fa13084abbe992e6cf04479d9dfef16b71a7 --- /dev/null +++ b/zomd/AISquare-Instruct-llama2-koen-13b-v0.9.17/result_2023-12-06 13:30:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39078498293515357, + "acc_stderr": 0.014258563880513785, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955265 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4239195379406493, + "acc_stderr": 0.0049316790599193755, + "acc_norm": 0.5747858992232623, + "acc_norm_stderr": 0.004933650697000603 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5210727969348659, + "acc_stderr": 0.017864076786212907, + "acc_norm": 0.5210727969348659, + "acc_norm_stderr": 0.017864076786212907 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.03996629574876718, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.03996629574876718 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { 
+ "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102315, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317213, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317213 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.031804252043840985, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.031804252043840985 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099522, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099522 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + 
"acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051208, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051208 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.027634176689602653, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.027634176689602653 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.019659922493623333, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.019659922493623333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.0280459469420424, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.0280459469420424 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953195, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953195 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464622, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464622 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + 
"acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048231, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048231 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326905, + "mc2": 0.43914355060529525, + "mc2_stderr": 0.014998093589375303 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4462809917355372, + "acc_stderr": 0.017090852631668336, + "acc_norm": 0.58913813459268, + "acc_norm_stderr": 0.01691497276784105 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zomd/AISquare-Instruct-llama2-koen-13b-v0.9.17", + "model_sha": "b35507f09656e2ba312bd9e0c491455aceee9a7e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/zomd/AISquare-Instruct-llama2-koen-13b-v0.9.19/result_2023-12-06 13:30:15.json b/zomd/AISquare-Instruct-llama2-koen-13b-v0.9.19/result_2023-12-06 13:30:15.json new file mode 100644 index 0000000000000000000000000000000000000000..7bf5fce07eda03017cd13b784b7a0eea8eaf7326 --- /dev/null +++ b/zomd/AISquare-Instruct-llama2-koen-13b-v0.9.19/result_2023-12-06 13:30:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3993174061433447, + "acc_stderr": 0.014312094557946704, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 0.014553749939306866 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4344752041426011, + "acc_stderr": 0.004946748608271345, + "acc_norm": 0.5835490938060147, + "acc_norm_stderr": 0.004919626380645508 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5466155810983397, + "acc_stderr": 0.017802087135850308, + "acc_norm": 0.5466155810983397, + "acc_norm_stderr": 0.017802087135850308 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479637, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479637 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + 
"acc_stderr": 0.03979236637497412, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.03979236637497412 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686855, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432564, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432564 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939101, + "acc_norm": 0.63, + 
"acc_norm_stderr": 0.04852365870939101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.021136376504030878, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.021136376504030878 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.02807415894760065, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760065 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777473, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777473 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.019524316744866346, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.019524316744866346 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.0278079901413202, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.0278079901413202 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 
+ }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933102, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214941, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214941 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950234, + "mc2": 0.4624651498702573, + "mc2_stderr": 0.015067280556431393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45454545454545453, + "acc_stderr": 0.017119172208061504, + "acc_norm": 0.5584415584415584, + "acc_norm_stderr": 0.017072525875563106 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zomd/AISquare-Instruct-llama2-koen-13b-v0.9.19", + "model_sha": "280db5f17c8ff2e8b48af84c6dacad00bc5da667", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/zomd/AISquare-Instruct-yi-ko-6b-v0.9.27/result_2023-12-21 04:52:13.json b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.27/result_2023-12-21 04:52:13.json new file mode 100644 index 0000000000000000000000000000000000000000..e30907c597f9542e3baba474da1c940c96d90ce8 --- /dev/null +++ b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.27/result_2023-12-21 04:52:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042192, + "acc_norm": 0.44283276450511944, + "acc_norm_stderr": 0.014515573873348906 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41814379605656243, + "acc_stderr": 0.004922459820434776, + "acc_norm": 0.5555666201951802, + "acc_norm_stderr": 0.004958872288442148 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5632183908045977, + "acc_stderr": 0.01773647083780069, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.01773647083780069 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + 
"acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999937, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.02533900301010653, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.02533900301010653 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.02920254015343118, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.02920254015343118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.02904560029061626, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.02904560029061626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.038016851045244604, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.038016851045244604 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149145, + 
"acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149145 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723368, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723368 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831027, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831027 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6146788990825688, + "acc_stderr": 0.020865850852794108, + "acc_norm": 0.6146788990825688, + "acc_norm_stderr": 0.020865850852794108 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375387, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375387 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510906, + "acc_norm": 0.2824074074074074, + 
"acc_norm_stderr": 0.030701372111510906 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098431, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098431 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03004261583271486, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03004261583271486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.011940264193195986, + "acc_norm": 0.3226857887874837, + "acc_norm_stderr": 0.011940264193195986 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.46606263672117376, + "mc2_stderr": 0.01539063853395172 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5171192443919717, + "acc_stderr": 0.017180275246085633, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.017077254131556224 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zomd/AISquare-Instruct-yi-ko-6b-v0.9.27", + "model_sha": "5f39720e3ac0bcebcdeb3328044f8d85da2aa35c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/zomd/AISquare-Instruct-yi-ko-6b-v0.9.28/result_2023-12-21 04:52:20.json b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.28/result_2023-12-21 04:52:20.json new file mode 100644 index 0000000000000000000000000000000000000000..a07ff69241afd0fe669e86aac4146311a6d1adce --- /dev/null +++ b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.28/result_2023-12-21 04:52:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111728, + "acc_norm": 0.4445392491467577, + "acc_norm_stderr": 0.01452122640562708 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4130651264688309, + "acc_stderr": 0.0049137803474988756, + "acc_norm": 0.5542720573590918, + "acc_norm_stderr": 0.004960299952519406 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977239, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977239 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5759897828863346, + "acc_stderr": 0.017672263329084222, + "acc_norm": 0.5759897828863346, + "acc_norm_stderr": 0.017672263329084222 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + 
"acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.02844341422643831, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.02844341422643831 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 
0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342654, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342654 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756653, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6580310880829016, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.6580310880829016, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.020920058346111072, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.020920058346111072 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42320261437908496, + "acc_stderr": 0.019987809769482064, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.019987809769482064 + 
}, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02728160834446941, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02728160834446941 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613541, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613541 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329883, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329883 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2994413407821229, + "acc_stderr": 0.015318257745976708, + "acc_norm": 0.2994413407821229, + "acc_norm_stderr": 0.015318257745976708 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.012043812655846144, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.012043812655846144 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.038517163193983926, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.038517163193983926 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33047735618115054, + "mc1_stderr": 0.016466769613698293, + "mc2": 0.48174146468286283, + "mc2_stderr": 0.015361505993239164 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5478158205430933, + "acc_stderr": 0.017111567130916796, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.016929480234495226 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zomd/AISquare-Instruct-yi-ko-6b-v0.9.28", + "model_sha": "ce572a092b5b580fb2a2e6c34bd038a6c1f209da", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/zomd/AISquare-Instruct-yi-ko-6b-v0.9.29/result_2023-12-22 04:28:57.json b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.29/result_2023-12-22 04:28:57.json new file mode 100644 index 0000000000000000000000000000000000000000..31c44519d35c454dc719043f3530c208f3aa3199 --- /dev/null +++ b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.29/result_2023-12-22 04:28:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111728, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580122 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41674965146385184, + "acc_stderr": 0.004920130733271777, + "acc_norm": 0.552778331009759, + "acc_norm_stderr": 0.004961904949171396 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5644955300127714, + "acc_stderr": 0.017730589927926598, + "acc_norm": 0.5644955300127714, + "acc_norm_stderr": 0.017730589927926598 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 
0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611549, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611549 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.03446897738659332, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.03446897738659332 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4948717948717949, + "acc_stderr": 0.025349672906838667, + "acc_norm": 0.4948717948717949, + "acc_norm_stderr": 0.025349672906838667 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255168, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255168 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983053, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983053 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.038818912133343826, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.038818912133343826 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.027801656212323674, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.027801656212323674 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.616580310880829, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.616580310880829, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6055045871559633, + "acc_stderr": 0.02095464210858749, + "acc_norm": 0.6055045871559633, + "acc_norm_stderr": 0.02095464210858749 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375383, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510906, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510906 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808848, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808848 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464622, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464622 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3161668839634941, + "acc_stderr": 0.011875780894386583, + "acc_norm": 0.3161668839634941, + "acc_norm_stderr": 0.011875780894386583 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.016058999026100623, + "mc2": 0.45625171579944035, + "mc2_stderr": 0.015423026113357257 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654273, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zomd/AISquare-Instruct-yi-ko-6b-v0.9.29", + "model_sha": "caa53097be14c57d052d6d29841524fa96630892", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/zomd/AISquare-Instruct-yi-ko-6b-v0.9.30/result_2023-12-22 04:29:27.json b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.30/result_2023-12-22 04:29:27.json new file mode 100644 index 0000000000000000000000000000000000000000..8e73fb0af41ee4bb87d2da87132b55b95925cb11 --- /dev/null +++ b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.30/result_2023-12-22 04:29:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042194, + "acc_norm": 0.43856655290102387, + "acc_norm_stderr": 0.014500682618212862 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4140609440350528, + "acc_stderr": 0.004915524600627968, + "acc_norm": 0.5542720573590918, + "acc_norm_stderr": 0.0049602999525194084 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 
0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5798212005108557, + "acc_stderr": 0.017650651363078026, + "acc_norm": 0.5798212005108557, + "acc_norm_stderr": 0.017650651363078026 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.034273086529999344, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.034273086529999344 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983056, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756656, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756656 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833942, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833942 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.019910377463105932, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.019910377463105932 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291517, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291517 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510923, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510923 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859936, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859936 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.03121956944530185, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.03121956944530185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.012084265626344208, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.012084265626344208 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3243574051407589, + "mc1_stderr": 0.01638797677964794, + "mc2": 0.4709332458329521, + "mc2_stderr": 0.015392858575324787 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.5596221959858324, + "acc_stderr": 0.017067699774312974, + "acc_norm": 0.6056670602125147, + "acc_norm_stderr": 0.016802090674893223 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zomd/AISquare-Instruct-yi-ko-6b-v0.9.30", + "model_sha": "b51ca94bdc9879721faaa5c3759774c892fd15d8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file